Each record describes one Python function extracted from a GitHub repository. Column schema, with the types and value-length statistics from the original header:

| column | type | observed values |
|---|---|---|
| nwo | string | 5-58 chars (repo name with owner) |
| sha | string | 40 chars (commit hash) |
| path | string | 5-172 chars (file path within the repo) |
| language | string | single class: `python` |
| identifier | string | 1-100 chars (function or `Class.method` name) |
| parameters | string | 2-3.5k chars |
| argument_list | string | single class |
| return_statement | string | 0-21.5k chars (empty when no return is recorded) |
| docstring | string | 2-17k chars |
| docstring_summary | string | 0-6.58k chars |
| docstring_tokens | sequence | tokenized form of `docstring` |
| function | string | 35-55.6k chars (full source of the function) |
| function_tokens | sequence | tokenized form of `function` |
| url | string | 89-269 chars (GitHub blob link with line range) |

The `docstring_tokens` and `function_tokens` sequences carry no information beyond a whitespace/punctuation tokenization of `docstring` and `function`, so the records below list the string columns and the function source only.
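If this table is backed by a Hugging Face dataset, records can be pulled and inspected with the `datasets` library. A minimal sketch; the dataset id `user/python-functions` is a placeholder assumption, not the real name:

```python
from datasets import load_dataset  # pip install datasets

# "user/python-functions" is a hypothetical dataset id -- substitute the real one.
ds = load_dataset("user/python-functions", split="train")

row = ds[0]
print(row["nwo"], row["path"], row["identifier"])  # repo, file, function name
print(row["docstring_summary"])
print(row["function"])                             # full function source
print(len(row["function_tokens"]), "tokens")       # tokenized duplicate of `function`
```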
---

- **nwo:** Dieterbe/anthracite | **path:** `bottle.py` | **sha:** `10d5b54e21a79aa0abc66d638828f0f251beacb5`
- **identifier:** `Route.prepare` | **parameters:** `(self)` | **return_statement:** (none)
- **docstring_summary:** Do all on-demand work immediately (useful for debugging).
- **url:** https://github.com/Dieterbe/anthracite/blob/10d5b54e21a79aa0abc66d638828f0f251beacb5/bottle.py#L457-L459

```python
def prepare(self):
    ''' Do all on-demand work immediately (useful for debugging).'''
    self.call
```
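The bare `self.call` above looks like a no-op, but in bottle `Route.call` is a lazily evaluated cached property, so merely reading the attribute forces the wrapped callback to be built, which is the "on-demand work" the docstring mentions. A minimal sketch of that access-for-side-effect pattern; the `cached_property` below is illustrative, not bottle's actual implementation:

```python
class cached_property:
    """Compute once on first attribute access, then cache on the instance."""
    def __init__(self, func):
        self.func = func
    def __get__(self, obj, cls):
        if obj is None:
            return self
        # doing the work now and storing it shadows the descriptor
        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value

class Route:
    @cached_property
    def call(self):
        print("building callback (plugins applied here)")
        return lambda: "response"

    def prepare(self):
        self.call  # touch the property so the work happens eagerly

r = Route()
r.prepare()   # prints once
r.call()      # already built; no rebuild
```

Touching the attribute eagerly moves any construction error from request time to setup time, which is why the docstring calls it useful for debugging.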
---

- **nwo:** atom-community/ide-python | **path:** `dist/debugger/VendorLib/vs-py-debugger/pythonFiles/completion.py` | **sha:** `c046f9c2421713b34baa22648235541c5bb284fe`
- **identifier:** `JediCompletion._get_call_signatures_with_args` | **parameters:** `(self, script)` | **return_statement:** `return _signatures`
- **docstring_summary:** Extract call signatures from jedi.api.Script object in failsafe way.
- **url:** https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/completion.py#L133-L176

```python
def _get_call_signatures_with_args(self, script):
    """Extract call signatures from jedi.api.Script object in failsafe way.

    Returns:
        Array with dictionary
    """
    _signatures = []
    try:
        call_signatures = script.call_signatures()
    except KeyError:
        call_signatures = []
    for signature in call_signatures:
        sig = {"name": "", "description": "", "docstring": "",
               "paramindex": 0, "params": [], "bracketstart": []}
        sig["description"] = signature.description
        try:
            sig["docstring"] = signature.docstring()
            sig["raw_docstring"] = signature.docstring(raw=True)
        except Exception:
            sig["docstring"] = ''
            sig["raw_docstring"] = ''
        sig["name"] = signature.name
        sig["paramindex"] = signature.index
        sig["bracketstart"].append(signature.index)
        _signatures.append(sig)
        for pos, param in enumerate(signature.params):
            if not param.name:
                continue
            name = self._get_param_name(param)
            if param.name == 'self' and pos == 0:
                continue
            value = self._get_param_value(param)
            paramDocstring = ''
            try:
                paramDocstring = param.docstring()
            except Exception:
                paramDocstring = ''
            sig["params"].append({"name": name, "value": value, "docstring": paramDocstring, "description": param.description})
    return _signatures
```
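A minimal driver for this extractor, assuming the vendored jedi here predates 0.16, where `Script(source, line, column, path)` and `Script.call_signatures()` were still the public API (newer jedi renamed these):

```python
import jedi  # assumes jedi < 0.16, matching the vendored copy in this repo

source = "import json\njson.dumps("
# cursor placed right after the open parenthesis on line 2
script = jedi.Script(source, 2, len("json.dumps("), "example.py")

for sig in script.call_signatures():
    print(sig.name, sig.index, [p.name for p in sig.params])
    # e.g. dumps 0 ['obj', 'skipkeys', ...]
```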
---

- **nwo:** triaquae/MadKing | **path:** `assets/myauth.py` | **sha:** `3bec8bf972335535818c669e93c95de01e31edd6`
- **identifier:** `UserProfile.has_module_perms` | **parameters:** `(self, app_label)` | **return_statement:** `return True`
- **docstring_summary:** Does the user have permissions to view the app `app_label`?
- **url:** https://github.com/triaquae/MadKing/blob/3bec8bf972335535818c669e93c95de01e31edd6/assets/myauth.py#L102-L105

```python
def has_module_perms(self, app_label):
    "Does the user have permissions to view the app `app_label`?"
    # Simplest possible answer: Yes, always
    return True
```
---

- **nwo:** hotosm/tasking-manager | **path:** `backend/services/organisation_service.py` | **sha:** `1a7b02c6ccd431029a96d709d4d786c83cb37f5e`
- **identifier:** `OrganisationService.can_user_manage_organisation` | **parameters:** `(organisation_id: int, user_id: int)` | **return_statement:** (none)
- **docstring_summary:** Check that the user is an admin for the org or a global admin
- **url:** https://github.com/hotosm/tasking-manager/blob/1a7b02c6ccd431029a96d709d4d786c83cb37f5e/backend/services/organisation_service.py#L258-L263

```python
def can_user_manage_organisation(organisation_id: int, user_id: int):
    """ Check that the user is an admin for the org or a global admin"""
    if UserService.is_user_an_admin(user_id):
        return True
    else:
        return OrganisationService.is_user_an_org_manager(organisation_id, user_id)
```
---

- **nwo:** rapidpro/rapidpro | **path:** `temba/contacts/search/omnibox.py` | **sha:** `8b6e58221fff967145f0b3411d85bcc15a0d3e72`
- **identifier:** `omnibox_query` | **parameters:** `(org, **kwargs)` | **return_statement:** `return omnibox_mixed_search(org, search, types)`
- **docstring_summary:** Performs a omnibox query based on the given arguments
- **url:** https://github.com/rapidpro/rapidpro/blob/8b6e58221fff967145f0b3411d85bcc15a0d3e72/temba/contacts/search/omnibox.py#L21-L60

```python
def omnibox_query(org, **kwargs):
    """
    Performs a omnibox query based on the given arguments
    """
    # determine what type of group/contact/URN lookup is being requested
    contact_uuids = kwargs.get("c", None)  # contacts with ids
    message_ids = kwargs.get("m", None)  # contacts with message ids
    label_id = kwargs.get("l", None)  # contacts in flow step with UUID
    group_uuids = kwargs.get("g", None)  # groups with ids
    urn_ids = kwargs.get("u", None)  # URNs with ids
    search = kwargs.get("search", None)  # search of groups, contacts and URNs
    types = list(kwargs.get("types", ""))  # limit search to types (g | s | c | u)

    # these lookups return a Contact queryset
    if contact_uuids or message_ids or label_id:
        qs = Contact.objects.filter(org=org, status=Contact.STATUS_ACTIVE, is_active=True)
        if contact_uuids:
            qs = qs.filter(uuid__in=contact_uuids.split(","))
        elif message_ids:
            qs = qs.filter(msgs__in=message_ids.split(","))
        elif label_id:
            label = Label.label_objects.get(pk=label_id)
            qs = qs.filter(msgs__in=label.get_messages())
        return qs.distinct().order_by("name")

    # this lookup returns a ContactGroup queryset
    elif group_uuids:
        return ContactGroup.user_groups.filter(org=org, uuid__in=group_uuids.split(",")).order_by("name")

    # this lookup returns a ContactURN queryset
    elif urn_ids:
        qs = ContactURN.objects.filter(org=org, id__in=urn_ids.split(",")).select_related("contact")
        return qs.order_by("path")

    # searching returns something which acts enough like a queryset to be paged
    return omnibox_mixed_search(org, search, types)
```
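Each keyword argument selects exactly one lookup branch, so callers drive the dispatch like this (argument names come from the function itself; `org` is assumed to be an existing Org record):

```python
# contacts by UUID (the "c" branch -> Contact queryset)
contacts = omnibox_query(org, c="uuid-1,uuid-2")

# groups by UUID (the "g" branch -> ContactGroup queryset)
groups = omnibox_query(org, g="group-uuid-1,group-uuid-2")

# free-text search limited to groups and contacts (the fall-through branch)
results = omnibox_query(org, search="ann", types="gc")
```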
---

- **nwo:** odoo/odoo | **path:** `odoo/models.py` | **sha:** `8de8c196a137f4ebbf67d7c7c83fee36f873f5c8`
- **identifier:** `BaseModel._register_hook` | **parameters:** `(self)` | **return_statement:** (none)
- **docstring_summary:** stuff to do right after the registry is built
- **url:** https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/odoo/models.py#L5070-L5072

```python
def _register_hook(self):
    """ stuff to do right after the registry is built """
    pass
```
---

- **nwo:** mhils/HoneyProxy | **path:** `libhproxy/twistedfallback.py` | **sha:** `3772bf2317ccac0c91017208af5fe97d88fea827`
- **identifier:** `_Constant.__repr__` | **parameters:** `(self)` | **return_statement:** `return "<%s=%s>" % (self._container.__name__, self.name)`
- **docstring_summary:** Return text identifying both which constant this is and which collection it belongs to.
- **url:** https://github.com/mhils/HoneyProxy/blob/3772bf2317ccac0c91017208af5fe97d88fea827/libhproxy/twistedfallback.py#L31-L36

```python
def __repr__(self):
    """
    Return text identifying both which constant this is and which collection
    it belongs to.
    """
    return "<%s=%s>" % (self._container.__name__, self.name)
```
---

- **nwo:** GeoNode/geonode | **path:** `geonode/monitoring/models.py` | **sha:** `326d70153ad79e1ed831d46a0e3b239d422757a8`
- **identifier:** `RequestEvent._get_geonode_resources` | **parameters:** `(cls, request)` | **return_statement:** `return resources`
- **docstring_summary:** Return serialized resources affected by request
- **url:** https://github.com/GeoNode/geonode/blob/326d70153ad79e1ed831d46a0e3b239d422757a8/geonode/monitoring/models.py#L513-L527

```python
def _get_geonode_resources(cls, request):
    """
    Return serialized resources affected by request
    """
    rqmeta = getattr(request, '_monitoring', {})
    events = rqmeta['events']
    resources = []
    # for type_name in 'layer map document style'.split():
    #     res = rqmeta['resources'].get(type_name) or []
    #     resources.extend(cls._get_resources(type_name, res))
    for evt_type, res_type, res_name, res_id in events:
        resources.extend(cls._get_or_create_resources(res_name, res_type, res_id))
    return resources
```
---

- **nwo:** GoogleCloudPlatform/PerfKitExplorer | **path:** `server/perfkit/explorer/samples_mart/label_manager.py` | **sha:** `9efa61015d50c25f6d753f0212ad3bf16876d496`
- **identifier:** `LabelManager.__init__` | **parameters:** `(self, labels=None)` | **return_statement:** (none)
- **docstring_summary:** Initializes the class.
- **url:** https://github.com/GoogleCloudPlatform/PerfKitExplorer/blob/9efa61015d50c25f6d753f0212ad3bf16876d496/server/perfkit/explorer/samples_mart/label_manager.py#L55-L61

```python
def __init__(self, labels=None):
    """Initializes the class.

    Args:
        labels: A list of labels and associated values.
    """
    self.labels = labels or []
```
---

- **nwo:** xtk/X | **path:** `lib/pypng-0.0.9/code/iccp.py` | **sha:** `04c1aa856664a8517d23aefd94c470d47130aead`
- **identifier:** `RDcurv` | **parameters:** `(s)` | **return_statement:** `return table`
- **docstring_summary:** Convert ICC curveType.
- **url:** https://github.com/xtk/X/blob/04c1aa856664a8517d23aefd94c470d47130aead/lib/pypng-0.0.9/code/iccp.py#L477-L487

```python
def RDcurv(s):
    """Convert ICC curveType."""
    # See [ICC 2001] 6.5.3
    assert s[0:4] == 'curv'
    count, = struct.unpack('>L', s[8:12])
    if count == 0:
        return dict(gamma=1)
    table = struct.unpack('>%dH' % count, s[12:])
    if count == 1:
        return dict(gamma=table[0]*2**-8)
    return table
```
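The byte layout `RDcurv` consumes, read off the code itself: a 4-byte `curv` signature, 4 reserved bytes, a big-endian uint32 count at offset 8, then `count` big-endian uint16 entries. A sketch that builds valid tags and exercises all three branches, written for Python 2 like the surrounding pypng code (where `s[0:4] == 'curv'` compares plain strings):

```python
import struct

def make_curv(values):
    # 'curv' signature + 4 reserved bytes + count + big-endian uint16 table
    return ('curv' + '\x00' * 4 +
            struct.pack('>L', len(values)) +
            struct.pack('>%dH' % len(values), *values))

print(RDcurv(make_curv([])))                 # {'gamma': 1}
print(RDcurv(make_curv([512])))              # {'gamma': 2.0}  (512 * 2**-8)
print(RDcurv(make_curv([0, 32768, 65535])))  # (0, 32768, 65535)
```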
---

- **nwo:** facebookarchive/nuclide | **path:** `pkg/nuclide-python-rpc/VendorLib/jedi/evaluate/filters.py` | **sha:** `2a2a0a642d136768b7d2a6d35a652dc5fb77d70a`
- **identifier:** `ParserTreeFilter.__init__` | **parameters:** `(self, evaluator, context, node_context=None, until_position=None, origin_scope=None)` | **return_statement:** (none)
- **docstring_summary:** node_context is an option to specify a second context for use cases like the class mro where the parent class of a new name would be the context, but for some type inference it's important to have a local context of the other classes.
- **url:** https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/pkg/nuclide-python-rpc/VendorLib/jedi/evaluate/filters.py#L184-L197

```python
def __init__(self, evaluator, context, node_context=None, until_position=None,
             origin_scope=None):
    """
    node_context is an option to specify a second context for use cases
    like the class mro where the parent class of a new name would be the
    context, but for some type inference it's important to have a local
    context of the other classes.
    """
    if node_context is None:
        node_context = context
    super(ParserTreeFilter, self).__init__(context, node_context.tree_node)
    self._node_context = node_context
    self._origin_scope = origin_scope
    self._until_position = until_position
```
---

- **nwo:** stdlib-js/stdlib | **path:** `lib/node_modules/@stdlib/math/special/abs/benchmark/python/numpy/benchmark.array_like_1d_contiguous_generic.py` | **sha:** `e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df`
- **identifier:** `print_version` | **parameters:** `()` | **return_statement:** (none)
- **docstring_summary:** Print the TAP version.
- **url:** https://github.com/stdlib-js/stdlib/blob/e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df/lib/node_modules/@stdlib/math/special/abs/benchmark/python/numpy/benchmark.array_like_1d_contiguous_generic.py#L32-L34

```python
def print_version():
    """Print the TAP version."""
    print("TAP version 13")
```
---

- **nwo:** att/rcloud | **path:** `rcloud.packages/rcloud.jupyter/inst/jupyter/jupyter_adapter.py` | **sha:** `392489d6ef88ece10f5999a832fb729da01116f2`
- **identifier:** `CellOutputCollector._update_display_id` | **parameters:** `(self, display_id, msg)` | **return_statement:** (none)
- **docstring_summary:** Update outputs with a given display_id
- **url:** https://github.com/att/rcloud/blob/392489d6ef88ece10f5999a832fb729da01116f2/rcloud.packages/rcloud.jupyter/inst/jupyter/jupyter_adapter.py#L96-L116

```python
def _update_display_id(self, display_id, msg):
    """Update outputs with a given display_id"""
    if display_id not in self._display_id_map:
        logging.debug("display id %r not in %s", display_id, self._display_id_map)
        return

    if msg['header']['msg_type'] == 'update_display_data':
        msg['header']['msg_type'] = 'display_data'

    try:
        out = output_from_msg(msg)
    except ValueError:
        logging.error("unhandled iopub msg: " + msg['msg_type'])
        return

    for cell_idx, output_indices in self._display_id_map[display_id].items():
        cell = self.nb['cells'][cell_idx]
        outputs = cell['outputs']
        for output_idx in output_indices:
            outputs[output_idx]['data'] = out['data']
            outputs[output_idx]['metadata'] = out['metadata']
```
---

- **nwo:** replit-archive/jsrepl | **path:** `extern/python/closured/lib/python2.7/telnetlib.py` | **sha:** `36d79b6288ca5d26208e8bade2a168c6ebcb2376`
- **identifier:** `Telnet.read_eager` | **parameters:** `(self)` | **return_statement:** `return self.read_very_lazy()`
- **docstring_summary:** Read readily available data.
- **url:** https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/telnetlib.py#L360-L372

```python
def read_eager(self):
    """Read readily available data.

    Raise EOFError if connection closed and no cooked data
    available. Return '' if no cooked data available otherwise.
    Don't block unless in the midst of an IAC sequence.
    """
    self.process_rawq()
    while not self.cookedq and not self.eof and self.sock_avail():
        self.fill_rawq()
        self.process_rawq()
    return self.read_very_lazy()
```
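A sketch of a polling loop against this API with the stdlib `telnetlib` (host and port are placeholders; this matches the Python 2.7 module vendored here, and the same calls exist unchanged in Python 3 up to 3.12):

```python
import telnetlib
import time

tn = telnetlib.Telnet('localhost', 23)   # placeholder host/port
try:
    for _ in range(10):
        chunk = tn.read_eager()          # returns '' instead of blocking
        if chunk:
            print(repr(chunk))
        time.sleep(0.5)
finally:
    tn.close()
```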
---

- **nwo:** replit-archive/jsrepl | **path:** `extern/python/unclosured/lib/python2.7/mailbox.py` | **sha:** `36d79b6288ca5d26208e8bade2a168c6ebcb2376`
- **identifier:** `_singlefileMailbox.unlock` | **parameters:** `(self)` | **return_statement:** (none)
- **docstring_summary:** Unlock the mailbox if it is locked.
- **url:** https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/mailbox.py#L612-L616

```python
def unlock(self):
    """Unlock the mailbox if it is locked."""
    if self._locked:
        _unlock_file(self._file)
        self._locked = False
```
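`unlock()` is the public counterpart on every stdlib mailbox class; the usual pattern brackets a mutation between `lock()` and `unlock()` (the path below is a placeholder):

```python
import mailbox

mbox = mailbox.mbox('/tmp/example.mbox')   # placeholder path
mbox.lock()                                # dot-lock plus flock/lockf
try:
    mbox.add(mailbox.mboxMessage('From: a@example.com\n\nhello\n'))
    mbox.flush()
finally:
    mbox.unlock()                          # no-op if already unlocked
    mbox.close()
```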
---

- **nwo:** babybuddy/babybuddy | **path:** `babybuddy/middleware.py` | **sha:** `acde3156c6de781f90a85d021eaf086b28a7a008`
- **identifier:** `update_en_us_date_formats` | **parameters:** `()` | **return_statement:** (none)
- **docstring_summary:** Update the datetime formats for the en-US locale.
- **url:** https://github.com/babybuddy/babybuddy/blob/acde3156c6de781f90a85d021eaf086b28a7a008/babybuddy/middleware.py#L11-L39

```python
def update_en_us_date_formats():
    """
    Update the datetime formats for the en-US locale. This is handled here and
    not using `FORMAT_MODULE_PATH` because the processing of format modules
    does not allow us to distinguish appropriately between en-US and en-GB
    based on user settings.
    """
    if settings.USE_24_HOUR_TIME_FORMAT:
        formats_en_us.DATETIME_FORMAT = 'N j, Y, H:i:s'
        custom_input_formats = [
            '%m/%d/%Y %H:%M:%S',  # '10/25/2006 14:30:59'
            '%m/%d/%Y %H:%M',  # '10/25/2006 14:30'
        ]
        formats_en_us.SHORT_DATETIME_FORMAT = 'm/d/Y G:i:s'
        formats_en_us.TIME_FORMAT = 'H:i:s'
    else:
        # These formats are added to support the locale style of Baby Buddy's
        # frontend library, which uses momentjs.
        custom_input_formats = [
            '%m/%d/%Y %I:%M:%S %p',  # '10/25/2006 2:30:59 PM'
            '%m/%d/%Y %I:%M %p',  # '10/25/2006 2:30 PM'
        ]

    # Add custom "short" version of `MONTH_DAY_FORMAT`.
    formats_en_us.SHORT_MONTH_DAY_FORMAT = 'M j'

    # Append all other input formats from the base locale.
    formats_en_us.DATETIME_INPUT_FORMATS = \
        custom_input_formats + formats_en_us.DATETIME_INPUT_FORMATS
```
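The `%`-style strings above are ordinary `strptime` input formats; the inline comments in the function show exactly which user inputs each one accepts:

```python
from datetime import datetime

# 12-hour format accepted when USE_24_HOUR_TIME_FORMAT is off
dt = datetime.strptime('10/25/2006 2:30:59 PM', '%m/%d/%Y %I:%M:%S %p')
assert dt == datetime(2006, 10, 25, 14, 30, 59)

# 24-hour format accepted when it is on
dt24 = datetime.strptime('10/25/2006 14:30', '%m/%d/%Y %H:%M')
assert dt24 == datetime(2006, 10, 25, 14, 30)
```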
---

- **nwo:** Nexedi/erp5 | **path:** `bt5/erp5_big_file/ModuleComponentTemplateItem/portal_components/module.erp5.BTreeData.py` | **sha:** `44df1959c0e21576cf5e9803d602d95efb4b695b`
- **identifier:** `BTreeData.defragment` | **parameters:** `(self, batch_size=100, resume_at=None)` | **return_statement:** (none)
- **docstring_summary:** Merge contiguous chunks up to max_chunk_size.
- **url:** https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/bt5/erp5_big_file/ModuleComponentTemplateItem/portal_components/module.erp5.BTreeData.py#L248-L312

```python
def defragment(self, batch_size=100, resume_at=None):
    """
    Merge contiguous chunks up to max_chunk_size.

    This method is a generator, allowing caller to define a stop condition
    (time, number of calls, ...). Yield value is an opaque, small and
    serialisable value which only use is to be passed to resume_at
    parameter.

    batch_size (int)
        Yield every this many internal operations. Allows trading overhead
        for precision. This value may be adjusted on-the-fly by giving the
        new value as parameter of the "send" method on generator (see
        Python doc on generators).
    resume_at (opaque)
        If provided, resume interrupted processing at that point.
    """
    chunk_size = self._max_chunk_size
    key = resume_at or 0
    tree = self._tree
    for iteration in itertools.count(1):
        try:
            key = tree.minKey(key)
        except ValueError:
            return
        if iteration % batch_size == 0:
            new_batch_size = yield key
            if new_batch_size:
                batch_size = new_batch_size
        chunk = tree[key]
        chunk_len = len(chunk.value)
        remainder = chunk_size - chunk_len
        if remainder <= 0:
            # Current entry is large enough, go to next one.
            key += 1
            continue
        end_offset = key + chunk_len
        try:
            next_key = tree.minKey(key + 1)
        except ValueError:
            # No next entry, defrag is over.
            return
        if next_key != end_offset:
            # There is a hole between current entry end and next one, do
            # not concatenate and move on with next entry.
            assert next_key > end_offset, (key, chunk_len, next_key)
            key = next_key
            continue
        next_chunk = tree[next_key]
        next_chunk_len = len(next_chunk.value)
        if next_chunk_len >= chunk_size:
            # Next entry is larger than target size, do not concatenate and
            # go to the entry after that.
            key = next_key + 1
            continue
        # Concatenate current entry and next one.
        chunk.value += next_chunk.value[:remainder]
        del tree[next_key]
        if next_chunk_len > remainder:
            key = next_key + remainder
            # Concatenation result is larger than target size, split into
            # a new entry.
            next_chunk.value = next_chunk.value[remainder:]
            tree[key] = next_chunk
```
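The docstring spells out a resumable-generator protocol: consume yields for as long as the caller's budget allows, persist the last yielded key, and feed it back later as `resume_at`. A time-boxed driver might look like this (`data` is assumed to be a `BTreeData` instance; the helper name is ours, not ERP5's):

```python
import time

def defragment_for(data, seconds, resume_at=None):
    """Defragment for at most `seconds`; return a resume token, or None when done."""
    deadline = time.time() + seconds
    gen = data.defragment(batch_size=100, resume_at=resume_at)
    token = resume_at
    while time.time() < deadline:
        try:
            token = next(gen)   # the deadline is only checked between yields
        except StopIteration:
            return None         # tree fully defragmented
    return token

token = defragment_for(data, seconds=0.5)
# ... later, e.g. in a new transaction:
if token is not None:
    token = defragment_for(data, seconds=0.5, resume_at=token)
```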
---

- **nwo:** catmaid/CATMAID | **path:** `django/applications/catmaid/control/deeplink.py` | **sha:** `9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf`
- **identifier:** `DeepLinkByIdSelector.delete` | **parameters:** `(self, request: Request, project_id, link_id)` | **return_statement:** (none)
- **docstring_summary:** Delete a deep-links available to the client.
- **url:** https://github.com/catmaid/CATMAID/blob/9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf/django/applications/catmaid/control/deeplink.py#L300-L313

```python
def delete(self, request: Request, project_id, link_id) -> Response:
    """Delete a deep-links available to the client.
    ---
    serializer: DeepLinkSerializer
    """
    try:
        deep_link = DeepLink.objects.get(project_id=project_id, id=link_id)
        can_edit_or_fail(request.user, deep_link.id, 'catmaid_deep_link')
        deep_link.delete()
        return Response({
            'deleted_id': link_id
        })
    except DeepLink.DoesNotExist:
        return Response('Link not found', status=status.HTTP_404_NOT_FOUND)
```
---

- **nwo:** xl7dev/BurpSuite | **path:** `Extender/burp-protobuf-decoder/Lib/google/protobuf/descriptor.py` | **sha:** `d1d4bd4981a87f2f4c0c9744ad7c476336c813da`
- **identifier:** `ServiceDescriptor.FindMethodByName` | **parameters:** `(self, name)` | **return_statement:** `return None`
- **docstring_summary:** Searches for the specified method, and returns its descriptor.
- **url:** https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/burp-protobuf-decoder/Lib/google/protobuf/descriptor.py#L597-L602

```python
def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor."""
    for method in self.methods:
        if name == method.name:
            return method
    return None
```
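Given a compiled proto that defines a service, this linear lookup is reached through the file descriptor; the module and names below are hypothetical stand-ins:

```python
# Assumes greeter.proto compiled to greeter_pb2, containing:
#   service Greeter { rpc SayHello (HelloRequest) returns (HelloReply); }
import greeter_pb2  # hypothetical generated module

service = greeter_pb2.DESCRIPTOR.services_by_name['Greeter']
print(service.FindMethodByName('SayHello').name)  # SayHello
print(service.FindMethodByName('Missing'))        # None -- scan exhausted
```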
---

- **nwo:** ansible/awx | **path:** `awx/api/views/root.py` | **sha:** `15c7a3f85b5e948f011c67111c4433a38c4544e9`
- **identifier:** `ApiV2ConfigView.get` | **parameters:** `(self, request, format=None)` | **return_statement:** `return Response(data)`
- **docstring_summary:** Return various sitewide configuration settings
- **url:** https://github.com/ansible/awx/blob/15c7a3f85b5e948f011c67111c4433a38c4544e9/awx/api/views/root.py#L271-L317

```python
def get(self, request, format=None):
    '''Return various sitewide configuration settings'''
    license_data = get_licenser().validate()
    if not license_data.get('valid_key', False):
        license_data = {}

    pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'

    data = dict(
        time_zone=settings.TIME_ZONE,
        license_info=license_data,
        version=get_awx_version(),
        eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
        analytics_status=pendo_state,
        analytics_collectors=all_collectors(),
        become_methods=PRIVILEGE_ESCALATION_METHODS,
    )

    # If LDAP is enabled, user_ldap_fields will return a list of field
    # names that are managed by LDAP and should be read-only for users with
    # a non-empty ldap_dn attribute.
    if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
        user_ldap_fields = ['username', 'password']
        user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
        user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
        data['user_ldap_fields'] = user_ldap_fields

    if (
        request.user.is_superuser
        or request.user.is_system_auditor
        or Organization.accessible_objects(request.user, 'admin_role').exists()
        or Organization.accessible_objects(request.user, 'auditor_role').exists()
        or Organization.accessible_objects(request.user, 'project_admin_role').exists()
    ):
        data.update(
            dict(
                project_base_dir=settings.PROJECTS_ROOT,
                project_local_paths=Project.get_local_path_choices(),
                custom_virtualenvs=get_custom_venv_choices(),
            )
        )
    elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
        data['custom_virtualenvs'] = get_custom_venv_choices()

    return Response(data)
```
---

- **nwo:** Southpaw-TACTIC/TACTIC | **path:** `src/tactic/ui/panel/edit_wdg.py` | **sha:** `ba9b87aef0ee3b3ea51446f25b285ebbca06f62c`
- **identifier:** `EditWdg.add_hidden_inputs` | **parameters:** `(self, div)` | **return_statement:** (none)
- **docstring_summary:** TODO: docs ... what is this for???
- **url:** https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/tactic/ui/panel/edit_wdg.py#L1126-L1128

```python
def add_hidden_inputs(self, div):
    '''TODO: docs ... what is this for???'''
    pass
```
---

- **nwo:** odoo/odoo | **path:** `addons/mrp/models/mrp_production.py` | **sha:** `8de8c196a137f4ebbf67d7c7c83fee36f873f5c8`
- **identifier:** `MrpProduction.action_view_mo_delivery` | **parameters:** `(self)` | **return_statement:** `return action`
- **docstring_summary:** This function returns an action that display picking related to manufacturing order orders.
- **url:** https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/mrp/models/mrp_production.py#L386-L404

```python
def action_view_mo_delivery(self):
    """ This function returns an action that display picking related to
    manufacturing order orders. It can either be a in a list or in a form
    view, if there is only one picking to show.
    """
    self.ensure_one()
    action = self.env["ir.actions.actions"]._for_xml_id("stock.action_picking_tree_all")
    pickings = self.mapped('picking_ids')
    if len(pickings) > 1:
        action['domain'] = [('id', 'in', pickings.ids)]
    elif pickings:
        form_view = [(self.env.ref('stock.view_picking_form').id, 'form')]
        if 'views' in action:
            action['views'] = form_view + [(state, view) for state, view in action['views'] if view != 'form']
        else:
            action['views'] = form_view
        action['res_id'] = pickings.id
    action['context'] = dict(self._context, default_origin=self.name, create=False)
    return action
```
---

- **nwo:** lukevink/hass-config-lajv | **path:** `custom_components/hacs/operational/setup.py` | **sha:** `cc435372da788fdbeb28c370fe10d6b4090d5244`
- **identifier:** `async_hacs_startup` | **parameters:** `()` | **return_statement:** `return True`
- **docstring_summary:** HACS startup tasks.
- **url:** https://github.com/lukevink/hass-config-lajv/blob/cc435372da788fdbeb28c370fe10d6b4090d5244/custom_components/hacs/operational/setup.py#L123-L216

```python
async def async_hacs_startup():
    """HACS startup tasks."""
    hacs = get_hacs()
    hacs.hass.data[DOMAIN] = hacs

    try:
        lovelace_info = await system_health_info(hacs.hass)
    except (TypeError, HomeAssistantError):
        # If this happens, the users YAML is not valid, we assume YAML mode
        lovelace_info = {"mode": "yaml"}
    hacs.log.debug(f"Configuration type: {hacs.configuration.config_type}")
    hacs.version = INTEGRATION_VERSION
    hacs.log.info(STARTUP)
    hacs.core.config_path = hacs.hass.config.path()
    hacs.system.ha_version = HAVERSION

    # Setup websocket API
    await async_setup_hacs_websockt_api()

    # Set up frontend
    await async_setup_frontend()

    # Clear old storage files
    await async_clear_storage()

    hacs.system.lovelace_mode = lovelace_info.get("mode", "yaml")
    hacs.enable()
    hacs.github = GitHub(
        hacs.configuration.token,
        async_create_clientsession(hacs.hass),
        headers=HACS_GITHUB_API_HEADERS,
    )
    hacs.data = HacsData()

    can_update = await get_fetch_updates_for(hacs.github)
    if can_update is None:
        hacs.log.critical("Your GitHub token is not valid")
        hacs.disable(HacsDisabledReason.INVALID_TOKEN)
        return False

    if can_update != 0:
        hacs.log.debug(f"Can update {can_update} repositories")
    else:
        reset = datetime.fromtimestamp(int(hacs.github.client.ratelimits.reset))
        hacs.log.error(
            "HACS is ratelimited, HACS will resume setup when the limit is cleared (%02d:%02d:%02d)",
            reset.hour,
            reset.minute,
            reset.second,
        )
        hacs.disable(HacsDisabledReason.RATE_LIMIT)
        return False

    # Check HACS Constrains
    if not await hacs.hass.async_add_executor_job(check_constrains):
        if hacs.configuration.config_type == "flow":
            if hacs.configuration.config_entry is not None:
                await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
        hacs.disable(HacsDisabledReason.CONSTRAINS)
        return False

    # Load HACS
    if not await async_load_hacs_repository():
        if hacs.configuration.config_type == "flow":
            if hacs.configuration.config_entry is not None:
                await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
        hacs.disable(HacsDisabledReason.LOAD_HACS)
        return False

    # Restore from storefiles
    if not await hacs.data.restore():
        hacs_repo = hacs.get_by_name("hacs/integration")
        hacs_repo.pending_restart = True
        if hacs.configuration.config_type == "flow":
            if hacs.configuration.config_entry is not None:
                await async_remove_entry(hacs.hass, hacs.configuration.config_entry)
        hacs.disable(HacsDisabledReason.RESTORE)
        return False

    # Setup startup tasks
    if hacs.hass.state == CoreState.running:
        async_call_later(hacs.hass, 5, hacs.startup_tasks)
    else:
        hacs.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, hacs.startup_tasks)

    # Set up sensor
    await async_add_sensor()

    # Mischief managed!
    await hacs.async_set_stage(HacsStage.WAITING)
    hacs.log.info(
        "Setup complete, waiting for Home Assistant before startup tasks starts"
    )
    return True
```
---

- **nwo:** Nexedi/erp5 | **path:** `product/ERP5/Document/BusinessTemplate.py` | **sha:** `44df1959c0e21576cf5e9803d602d95efb4b695b`
- **identifier:** `ToolTemplateItem.remove` | **parameters:** `(self, context, **kw)` | **return_statement:** (none)
- **docstring_summary:** When we remove a tool, unregister it from the type provider.
- **url:** https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/Document/BusinessTemplate.py#L1869-L1882

```python
def remove(self, context, **kw):
    """ When we remove a tool, unregister it from the type provider. """
    portal = context.getPortalObject()
    types_tool = portal.portal_types
    remove_dict = kw.get('remove_object_dict', {})
    keys = self._objects.keys()
    for tool_id in keys:
        if remove_dict.has_key(tool_id):  # Python 2 dict API, as in ERP5
            action = remove_dict[tool_id]
            if 'remove' in action:
                types_tool.type_provider_list = tuple([
                    x for x in types_tool.type_provider_list
                    if x != tool_id])
    PathTemplateItem.remove(self, context, **kw)
```
finos/perspective | 910799a5c981ab501fd907f34a21b0ef5a9a914c | python/perspective/perspective/widget/widget.py | python | PerspectiveWidget._make_load_message | (self) | Send a message to the front-end either containing the name of a
hosted view in Python, so the front-end can create a table in the
Perspective WebAssembly client, or if `server` is True, the name of a
Table in python, or the serialized dataset with options if `client`
is True.
If the front-end requests data and it has not been loaded yet,
an error will be logged, and the front-end will wait for `load()` to
be called, which will notify the front-end of new data. | Send a message to the front-end either containing the name of a
hosted view in Python, so the front-end can create a table in the
Perspective WebAssembly client, or if `server` is True, the name of a
Table in python, or the serialized dataset with options if `client`
is True. | [
"Send",
"a",
"message",
"to",
"the",
"front",
"-",
"end",
"either",
"containing",
"the",
"name",
"of",
"a",
"hosted",
"view",
"in",
"Python",
"so",
"the",
"front",
"-",
"end",
"can",
"create",
"a",
"table",
"in",
"the",
"Perspective",
"WebAssembly",
"client",
"or",
"if",
"server",
"is",
"True",
"the",
"name",
"of",
"a",
"Table",
"in",
"python",
"or",
"the",
"serialized",
"dataset",
"with",
"options",
"if",
"client",
"is",
"True",
"."
] | def _make_load_message(self):
"""Send a message to the front-end either containing the name of a
hosted view in Python, so the front-end can create a table in the
Perspective WebAssembly client, or if `server` is True, the name of a
Table in python, or the serialized dataset with options if `client`
is True.
If the front-end requests data and it has not been loaded yet,
an error will be logged, and the front-end will wait for `load()` to
be called, which will notify the front-end of new data.
"""
msg_data = None
if self.client and self._data is not None:
# Send serialized data to the browser, which will run Perspective
# in client mode: there is no table in the Python kernel.
msg_data = {"data": self._data, "options": self._options}
elif self.server and self.table_name is not None:
# If the `server` kwarg is set during initialization, Perspective
# will run in server-only mode, where a Table is hosted in the
# kernel and the front-end proxies pivots, sorts, data requests
# etc. to the kernel and does not run a Table in the front-end.
msg_data = {"table_name": self.table_name}
elif self.table_name is not None:
# If a view is hosted by the widget's manager (by default),
# run Perspective in client-server mode: a Table will be created
# in the front-end that mirrors the Table hosted in the kernel,
# and updates and edits will be synchronized across the client
# and the server.
msg_data = {
"table_name": self.table_name,
"options": {},
}
index = self.table.get_index()
limit = self.table.get_limit()
if index is not None:
msg_data["options"]["index"] = index
elif limit is not None:
msg_data["options"]["limit"] = limit
if msg_data is not None:
return _PerspectiveWidgetMessage(-2, "table", msg_data)
else:
raise PerspectiveError(
"Widget does not have any data loaded - use the `load()` method to provide it with data."
) | [
"def",
"_make_load_message",
"(",
"self",
")",
":",
"msg_data",
"=",
"None",
"if",
"self",
".",
"client",
"and",
"self",
".",
"_data",
"is",
"not",
"None",
":",
"# Send serialized data to the browser, which will run Perspective",
"# in client mode: there is no table in the Python kernel.",
"msg_data",
"=",
"{",
"\"data\"",
":",
"self",
".",
"_data",
",",
"\"options\"",
":",
"self",
".",
"_options",
"}",
"elif",
"self",
".",
"server",
"and",
"self",
".",
"table_name",
"is",
"not",
"None",
":",
"# If the `server` kwarg is set during initialization, Perspective",
"# will run in server-only mode, where a Table is hosted in the",
"# kernel and the front-end proxies pivots, sorts, data requests",
"# etc. to the kernel and does not run a Table in the front-end.",
"msg_data",
"=",
"{",
"\"table_name\"",
":",
"self",
".",
"table_name",
"}",
"elif",
"self",
".",
"table_name",
"is",
"not",
"None",
":",
"# If a view is hosted by the widget's manager (by default),",
"# run Perspective in client-server mode: a Table will be created",
"# in the front-end that mirrors the Table hosted in the kernel,",
"# and updates and edits will be synchronized across the client",
"# and the server.",
"msg_data",
"=",
"{",
"\"table_name\"",
":",
"self",
".",
"table_name",
",",
"\"options\"",
":",
"{",
"}",
",",
"}",
"index",
"=",
"self",
".",
"table",
".",
"get_index",
"(",
")",
"limit",
"=",
"self",
".",
"table",
".",
"get_limit",
"(",
")",
"if",
"index",
"is",
"not",
"None",
":",
"msg_data",
"[",
"\"options\"",
"]",
"[",
"\"index\"",
"]",
"=",
"index",
"elif",
"limit",
"is",
"not",
"None",
":",
"msg_data",
"[",
"\"options\"",
"]",
"[",
"\"limit\"",
"]",
"=",
"limit",
"if",
"msg_data",
"is",
"not",
"None",
":",
"return",
"_PerspectiveWidgetMessage",
"(",
"-",
"2",
",",
"\"table\"",
",",
"msg_data",
")",
"else",
":",
"raise",
"PerspectiveError",
"(",
"\"Widget does not have any data loaded - use the `load()` method to provide it with data.\"",
")"
] | https://github.com/finos/perspective/blob/910799a5c981ab501fd907f34a21b0ef5a9a914c/python/perspective/perspective/widget/widget.py#L493-L540 |
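A minimal sketch of the three-way branch documented in the record above, using stand-in types rather than the real perspective classes (the Message tuple and the keyword names are illustrative assumptions, not the actual API):

from collections import namedtuple

# Stand-in for _PerspectiveWidgetMessage; the (-2, "table", data) shape is
# taken from the function body above.
Message = namedtuple("Message", ["msg_id", "msg_type", "data"])

def make_load_message(client, server, table_name, data=None, options=None):
    # Mirrors the client / server-only / client-server branches above.
    if client and data is not None:
        return Message(-2, "table", {"data": data, "options": options})
    if server and table_name is not None:
        return Message(-2, "table", {"table_name": table_name})
    if table_name is not None:
        return Message(-2, "table", {"table_name": table_name, "options": {}})
    raise ValueError("no data loaded - call load() first")

print(make_load_message(client=True, server=False, table_name=None,
                        data=[{"x": 1}], options={}))
print(make_load_message(client=False, server=True, table_name="tbl"))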
||
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/core/views.py | python | get_user_source_list | (request) | Get a user's source list. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse` | Get a user's source list. Should be an AJAX POST. | [
"Get",
"a",
"user",
"s",
"source",
"list",
".",
"Should",
"be",
"an",
"AJAX",
"POST",
"."
] | def get_user_source_list(request):
"""
Get a user's source list. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
user_source_access = user_sources('%s' % request.user.username)
message = {'success': True,
'data': user_source_access}
return HttpResponse(json.dumps(message),
content_type="application/json")
else:
error = "Expected AJAX POST"
return render(request, "error.html", {"error" : error }) | [
"def",
"get_user_source_list",
"(",
"request",
")",
":",
"if",
"request",
".",
"method",
"==",
"'POST'",
"and",
"request",
".",
"is_ajax",
"(",
")",
":",
"user_source_access",
"=",
"user_sources",
"(",
"'%s'",
"%",
"request",
".",
"user",
".",
"username",
")",
"message",
"=",
"{",
"'success'",
":",
"True",
",",
"'data'",
":",
"user_source_access",
"}",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"message",
")",
",",
"content_type",
"=",
"\"application/json\"",
")",
"else",
":",
"error",
"=",
"\"Expected AJAX POST\"",
"return",
"render",
"(",
"request",
",",
"\"error.html\"",
",",
"{",
"\"error\"",
":",
"error",
"}",
")"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/core/views.py#L1389-L1406 |
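For reference, the success branch above serializes a plain dict; a stand-alone sketch of the JSON body it would send (the source names are hypothetical):

import json

user_source_access = ["SourceA", "SourceB"]  # stand-in for user_sources(...)
message = {"success": True, "data": user_source_access}
print(json.dumps(message))  # {"success": true, "data": ["SourceA", "SourceB"]}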
||
tuan3w/visual_search | 62665d4ac58669bad8e7f5ffed18d2914ffa8b01 | visual_search/lib/setup.py | python | locate_cuda | () | return cudaconfig | Locate the CUDA environment on the system
Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64'
and values giving the absolute path to each directory.
Starts by looking for the CUDAHOME env variable. If not found, everything
is based on finding 'nvcc' in the PATH. | Locate the CUDA environment on the system | [
"Locate",
"the",
"CUDA",
"environment",
"on",
"the",
"system"
] | def locate_cuda():
"""Locate the CUDA environment on the system
Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64'
and values giving the absolute path to each directory.
Starts by looking for the CUDAHOME env variable. If not found, everything
is based on finding 'nvcc' in the PATH.
"""
# first check if the CUDAHOME env variable is in use
if 'CUDAHOME' in os.environ:
home = os.environ['CUDAHOME']
nvcc = pjoin(home, 'bin', 'nvcc')
else:
# otherwise, search the PATH for NVCC
default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin')
nvcc = find_in_path('nvcc', os.environ['PATH'] + os.pathsep + default_path)
if nvcc is None:
raise EnvironmentError('The nvcc binary could not be '
'located in your $PATH. Either add it to your path, or set $CUDAHOME')
home = os.path.dirname(os.path.dirname(nvcc))
cudaconfig = {'home':home, 'nvcc':nvcc,
'include': pjoin(home, 'include'),
'lib64': pjoin(home, 'lib64')}
for k, v in cudaconfig.iteritems():
if not os.path.exists(v):
raise EnvironmentError('The CUDA %s path could not be located in %s' % (k, v))
return cudaconfig | [
"def",
"locate_cuda",
"(",
")",
":",
"# first check if the CUDAHOME env variable is in use",
"if",
"'CUDAHOME'",
"in",
"os",
".",
"environ",
":",
"home",
"=",
"os",
".",
"environ",
"[",
"'CUDAHOME'",
"]",
"nvcc",
"=",
"pjoin",
"(",
"home",
",",
"'bin'",
",",
"'nvcc'",
")",
"else",
":",
"# otherwise, search the PATH for NVCC",
"default_path",
"=",
"pjoin",
"(",
"os",
".",
"sep",
",",
"'usr'",
",",
"'local'",
",",
"'cuda'",
",",
"'bin'",
")",
"nvcc",
"=",
"find_in_path",
"(",
"'nvcc'",
",",
"os",
".",
"environ",
"[",
"'PATH'",
"]",
"+",
"os",
".",
"pathsep",
"+",
"default_path",
")",
"if",
"nvcc",
"is",
"None",
":",
"raise",
"EnvironmentError",
"(",
"'The nvcc binary could not be '",
"'located in your $PATH. Either add it to your path, or set $CUDAHOME'",
")",
"home",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"nvcc",
")",
")",
"cudaconfig",
"=",
"{",
"'home'",
":",
"home",
",",
"'nvcc'",
":",
"nvcc",
",",
"'include'",
":",
"pjoin",
"(",
"home",
",",
"'include'",
")",
",",
"'lib64'",
":",
"pjoin",
"(",
"home",
",",
"'lib64'",
")",
"}",
"for",
"k",
",",
"v",
"in",
"cudaconfig",
".",
"iteritems",
"(",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"v",
")",
":",
"raise",
"EnvironmentError",
"(",
"'The CUDA %s path could not be located in %s'",
"%",
"(",
"k",
",",
"v",
")",
")",
"return",
"cudaconfig"
] | https://github.com/tuan3w/visual_search/blob/62665d4ac58669bad8e7f5ffed18d2914ffa8b01/visual_search/lib/setup.py#L24-L54 |
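Note the record's code is Python 2 (`iteritems`); under Python 3 that loop would use `cudaconfig.items()`. Below is a sketch of the `find_in_path` helper the function relies on, reimplemented from its apparent contract (the real one is defined elsewhere in that setup.py):

import os

def find_in_path(filename, path):
    # Return the first existing `filename` under any directory in `path`,
    # or None - the contract locate_cuda() above appears to assume.
    for directory in path.split(os.pathsep):
        candidate = os.path.join(directory, filename)
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    return None

print(find_in_path("nvcc", os.environ.get("PATH", "")))  # None if CUDA absent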
|
jongman/algospot | 89294f76d4beaac1cc1e0767bfa42a96f77ccc2f | www/newsfeed/utils.py | python | get_objects_for_user | (user, perms, klass=None, use_groups=True, any_perm=False) | return objects | Returns queryset of objects for which a given ``user`` has *all*
permissions present at ``perms``.
:param user: ``User`` instance for which objects would be returned
:param perms: single permission string, or sequence of permission strings
which should be checked.
If ``klass`` parameter is not given, those should be full permission
names rather than only codenames (i.e. ``auth.change_user``). If more than
one permission is present within sequence, their content type **must** be
the same or ``MixedContentTypeError`` exception would be raised.
:param klass: may be a Model, Manager or QuerySet object. If not given
this parameter would be computed based on given ``params``.
:param use_groups: if ``False``, wouldn't check user's groups object
permissions. Default is ``True``.
:param any_perm: if True, any one of the permissions in the sequence is accepted
:raises MixedContentTypeError: when computed content type for ``perms``
and/or ``klass`` clashes.
:raises WrongAppError: if the app label cannot be computed for the given ``perms``/
``klass``.
Example::
>>> from guardian.shortcuts import get_objects_for_user
>>> joe = User.objects.get(username='joe')
>>> get_objects_for_user(joe, 'auth.change_group')
[]
>>> from guardian.shortcuts import assign_perm
>>> group = Group.objects.create('some group')
>>> assign_perm('auth.change_group', joe, group)
>>> get_objects_for_user(joe, 'auth.change_group')
[<Group some group>]
The permission string can also be an iterable. Continuing with the previous example:
>>> get_objects_for_user(joe, ['auth.change_group', 'auth.delete_group'])
[]
>>> get_objects_for_user(joe, ['auth.change_group', 'auth.delete_group'], any_perm=True)
[<Group some group>]
>>> assign_perm('auth.delete_group', joe, group)
>>> get_objects_for_user(joe, ['auth.change_group', 'auth.delete_group'])
[<Group some group>] | Returns queryset of objects for which a given ``user`` has *all*
permissions present at ``perms``. | [
"Returns",
"queryset",
"of",
"objects",
"for",
"which",
"a",
"given",
"user",
"has",
"*",
"all",
"*",
"permissions",
"present",
"at",
"perms",
"."
] | def get_objects_for_user(user, perms, klass=None, use_groups=True, any_perm=False):
"""
Returns queryset of objects for which a given ``user`` has *all*
permissions present at ``perms``.
:param user: ``User`` instance for which objects would be returned
:param perms: single permission string, or sequence of permission strings
which should be checked.
If ``klass`` parameter is not given, those should be full permission
names rather than only codenames (i.e. ``auth.change_user``). If more than
one permission is present within sequence, their content type **must** be
the same or ``MixedContentTypeError`` exception would be raised.
:param klass: may be a Model, Manager or QuerySet object. If not given
this parameter would be computed based on given ``params``.
:param use_groups: if ``False``, wouldn't check user's groups object
permissions. Default is ``True``.
:param any_perm: if True, any one of the permissions in the sequence is accepted
:raises MixedContentTypeError: when computed content type for ``perms``
and/or ``klass`` clashes.
:raises WrongAppError: if the app label cannot be computed for the given ``perms``/
``klass``.
Example::
>>> from guardian.shortcuts import get_objects_for_user
>>> joe = User.objects.get(username='joe')
>>> get_objects_for_user(joe, 'auth.change_group')
[]
>>> from guardian.shortcuts import assign_perm
>>> group = Group.objects.create('some group')
>>> assign_perm('auth.change_group', joe, group)
>>> get_objects_for_user(joe, 'auth.change_group')
[<Group some group>]
The permission string can also be an iterable. Continuing with the previous example:
>>> get_objects_for_user(joe, ['auth.change_group', 'auth.delete_group'])
[]
>>> get_objects_for_user(joe, ['auth.change_group', 'auth.delete_group'], any_perm=True)
[<Group some group>]
>>> assign_perm('auth.delete_group', joe, group)
>>> get_objects_for_user(joe, ['auth.change_group', 'auth.delete_group'])
[<Group some group>]
"""
if isinstance(perms, basestring):
perms = [perms]
ctype = None
app_label = None
codenames = set()
# Compute codenames set and ctype if possible
for perm in perms:
if '.' in perm:
new_app_label, codename = perm.split('.', 1)
if app_label is not None and app_label != new_app_label:
raise MixedContentTypeError("Given perms must have same app "
"label (%s != %s)" % (app_label, new_app_label))
else:
app_label = new_app_label
else:
codename = perm
codenames.add(codename)
if app_label is not None:
new_ctype = ContentType.objects.get(app_label=app_label,
permission__codename=codename)
if ctype is not None and ctype != new_ctype:
raise MixedContentTypeError("ContentType was once computed "
"to be %s and another one %s" % (ctype, new_ctype))
else:
ctype = new_ctype
# Compute queryset and ctype if still missing
if ctype is None and klass is None:
raise WrongAppError("Cannot determine content type")
elif ctype is None and klass is not None:
queryset = _get_queryset(klass)
ctype = ContentType.objects.get_for_model(queryset.model)
elif ctype is not None and klass is None:
queryset = _get_queryset(ctype.model_class())
else:
queryset = _get_queryset(klass)
if ctype.model_class() != queryset.model:
raise MixedContentTypeError("Content type for given perms and "
"klass differs")
# At this point, we should have both ctype and queryset and they should
# match which means: ctype.model_class() == queryset.model
# we should also have ``codenames`` list
# First check if user is superuser and if so, return queryset immediately
if user.is_superuser:
return queryset
# Now we should extract list of pk values for which we would filter queryset
user_model = get_user_obj_perms_model(queryset.model)
user_obj_perms_queryset = (user_model.objects
.filter(user=user)
.filter(permission__content_type=ctype)
.filter(permission__codename__in=codenames))
if user_model.objects.is_generic():
fields = ['object_pk', 'permission__codename']
else:
fields = ['content_object__pk', 'permission__codename']
if use_groups:
group_model = get_group_obj_perms_model(queryset.model)
group_filters = {
'permission__content_type': ctype,
'permission__codename__in': codenames,
'group__%s' % get_user_model()._meta.module_name: user,
}
groups_obj_perms_queryset = group_model.objects.filter(**group_filters)
if group_model.objects.is_generic():
fields = ['object_pk', 'permission__codename']
else:
fields = ['content_object__pk', 'permission__codename']
if not any_perm:
user_obj_perms = user_obj_perms_queryset.values_list(*fields)
groups_obj_perms = groups_obj_perms_queryset.values_list(*fields)
data = list(user_obj_perms) + list(groups_obj_perms)
keyfunc = lambda t: t[0] # sorting/grouping by pk (first in result tuple)
data = sorted(data, key=keyfunc)
pk_list = []
for pk, group in groupby(data, keyfunc):
obj_codenames = set((e[1] for e in group))
if codenames.issubset(obj_codenames):
pk_list.append(pk)
objects = queryset.filter(pk__in=pk_list)
return objects
if not any_perm:
counts = user_obj_perms_queryset.values(fields[0]).annotate(object_pk_count=Count(fields[0]))
user_obj_perms_queryset = counts.filter(object_pk_count__gte=len(codenames))
objects = queryset.filter(pk__in=user_obj_perms_queryset.values_list(fields[0], flat=True))
if use_groups:
objects |= queryset.filter(pk__in=groups_obj_perms_queryset.values_list(fields[0], flat=True))
return objects | [
"def",
"get_objects_for_user",
"(",
"user",
",",
"perms",
",",
"klass",
"=",
"None",
",",
"use_groups",
"=",
"True",
",",
"any_perm",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"perms",
",",
"basestring",
")",
":",
"perms",
"=",
"[",
"perms",
"]",
"ctype",
"=",
"None",
"app_label",
"=",
"None",
"codenames",
"=",
"set",
"(",
")",
"# Compute codenames set and ctype if possible",
"for",
"perm",
"in",
"perms",
":",
"if",
"'.'",
"in",
"perm",
":",
"new_app_label",
",",
"codename",
"=",
"perm",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"if",
"app_label",
"is",
"not",
"None",
"and",
"app_label",
"!=",
"new_app_label",
":",
"raise",
"MixedContentTypeError",
"(",
"\"Given perms must have same app \"",
"\"label (%s != %s)\"",
"%",
"(",
"app_label",
",",
"new_app_label",
")",
")",
"else",
":",
"app_label",
"=",
"new_app_label",
"else",
":",
"codename",
"=",
"perm",
"codenames",
".",
"add",
"(",
"codename",
")",
"if",
"app_label",
"is",
"not",
"None",
":",
"new_ctype",
"=",
"ContentType",
".",
"objects",
".",
"get",
"(",
"app_label",
"=",
"app_label",
",",
"permission__codename",
"=",
"codename",
")",
"if",
"ctype",
"is",
"not",
"None",
"and",
"ctype",
"!=",
"new_ctype",
":",
"raise",
"MixedContentTypeError",
"(",
"\"ContentType was once computed \"",
"\"to be %s and another one %s\"",
"%",
"(",
"ctype",
",",
"new_ctype",
")",
")",
"else",
":",
"ctype",
"=",
"new_ctype",
"# Compute queryset and ctype if still missing",
"if",
"ctype",
"is",
"None",
"and",
"klass",
"is",
"None",
":",
"raise",
"WrongAppError",
"(",
"\"Cannot determine content type\"",
")",
"elif",
"ctype",
"is",
"None",
"and",
"klass",
"is",
"not",
"None",
":",
"queryset",
"=",
"_get_queryset",
"(",
"klass",
")",
"ctype",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"queryset",
".",
"model",
")",
"elif",
"ctype",
"is",
"not",
"None",
"and",
"klass",
"is",
"None",
":",
"queryset",
"=",
"_get_queryset",
"(",
"ctype",
".",
"model_class",
"(",
")",
")",
"else",
":",
"queryset",
"=",
"_get_queryset",
"(",
"klass",
")",
"if",
"ctype",
".",
"model_class",
"(",
")",
"!=",
"queryset",
".",
"model",
":",
"raise",
"MixedContentTypeError",
"(",
"\"Content type for given perms and \"",
"\"klass differs\"",
")",
"# At this point, we should have both ctype and queryset and they should",
"# match which means: ctype.model_class() == queryset.model",
"# we should also have ``codenames`` list",
"# First check if user is superuser and if so, return queryset immediately",
"if",
"user",
".",
"is_superuser",
":",
"return",
"queryset",
"# Now we should extract list of pk values for which we would filter queryset",
"user_model",
"=",
"get_user_obj_perms_model",
"(",
"queryset",
".",
"model",
")",
"user_obj_perms_queryset",
"=",
"(",
"user_model",
".",
"objects",
".",
"filter",
"(",
"user",
"=",
"user",
")",
".",
"filter",
"(",
"permission__content_type",
"=",
"ctype",
")",
".",
"filter",
"(",
"permission__codename__in",
"=",
"codenames",
")",
")",
"if",
"user_model",
".",
"objects",
".",
"is_generic",
"(",
")",
":",
"fields",
"=",
"[",
"'object_pk'",
",",
"'permission__codename'",
"]",
"else",
":",
"fields",
"=",
"[",
"'content_object__pk'",
",",
"'permission__codename'",
"]",
"if",
"use_groups",
":",
"group_model",
"=",
"get_group_obj_perms_model",
"(",
"queryset",
".",
"model",
")",
"group_filters",
"=",
"{",
"'permission__content_type'",
":",
"ctype",
",",
"'permission__codename__in'",
":",
"codenames",
",",
"'group__%s'",
"%",
"get_user_model",
"(",
")",
".",
"_meta",
".",
"module_name",
":",
"user",
",",
"}",
"groups_obj_perms_queryset",
"=",
"group_model",
".",
"objects",
".",
"filter",
"(",
"*",
"*",
"group_filters",
")",
"if",
"group_model",
".",
"objects",
".",
"is_generic",
"(",
")",
":",
"fields",
"=",
"[",
"'object_pk'",
",",
"'permission__codename'",
"]",
"else",
":",
"fields",
"=",
"[",
"'content_object__pk'",
",",
"'permission__codename'",
"]",
"if",
"not",
"any_perm",
":",
"user_obj_perms",
"=",
"user_obj_perms_queryset",
".",
"values_list",
"(",
"*",
"fields",
")",
"groups_obj_perms",
"=",
"groups_obj_perms_queryset",
".",
"values_list",
"(",
"*",
"fields",
")",
"data",
"=",
"list",
"(",
"user_obj_perms",
")",
"+",
"list",
"(",
"groups_obj_perms",
")",
"keyfunc",
"=",
"lambda",
"t",
":",
"t",
"[",
"0",
"]",
"# sorting/grouping by pk (first in result tuple)",
"data",
"=",
"sorted",
"(",
"data",
",",
"key",
"=",
"keyfunc",
")",
"pk_list",
"=",
"[",
"]",
"for",
"pk",
",",
"group",
"in",
"groupby",
"(",
"data",
",",
"keyfunc",
")",
":",
"obj_codenames",
"=",
"set",
"(",
"(",
"e",
"[",
"1",
"]",
"for",
"e",
"in",
"group",
")",
")",
"if",
"codenames",
".",
"issubset",
"(",
"obj_codenames",
")",
":",
"pk_list",
".",
"append",
"(",
"pk",
")",
"objects",
"=",
"queryset",
".",
"filter",
"(",
"pk__in",
"=",
"pk_list",
")",
"return",
"objects",
"if",
"not",
"any_perm",
":",
"counts",
"=",
"user_obj_perms_queryset",
".",
"values",
"(",
"fields",
"[",
"0",
"]",
")",
".",
"annotate",
"(",
"object_pk_count",
"=",
"Count",
"(",
"fields",
"[",
"0",
"]",
")",
")",
"user_obj_perms_queryset",
"=",
"counts",
".",
"filter",
"(",
"object_pk_count__gte",
"=",
"len",
"(",
"codenames",
")",
")",
"objects",
"=",
"queryset",
".",
"filter",
"(",
"pk__in",
"=",
"user_obj_perms_queryset",
".",
"values_list",
"(",
"fields",
"[",
"0",
"]",
",",
"flat",
"=",
"True",
")",
")",
"if",
"use_groups",
":",
"objects",
"|=",
"queryset",
".",
"filter",
"(",
"pk__in",
"=",
"groups_obj_perms_queryset",
".",
"values_list",
"(",
"fields",
"[",
"0",
"]",
",",
"flat",
"=",
"True",
")",
")",
"return",
"objects"
] | https://github.com/jongman/algospot/blob/89294f76d4beaac1cc1e0767bfa42a96f77ccc2f/www/newsfeed/utils.py#L25-L165 |
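The heart of the non-`any_perm` branch is the sort-then-groupby subset check; modelled here in pure Python with made-up (object_pk, codename) rows:

from itertools import groupby

# Rows as values_list(*fields) would yield them: (object_pk, codename).
rows = [(1, "change_group"), (1, "delete_group"), (2, "change_group")]
required = {"change_group", "delete_group"}

rows.sort(key=lambda t: t[0])                 # groupby needs sorted input
pk_list = [pk for pk, grp in groupby(rows, key=lambda t: t[0])
           if required.issubset({codename for _, codename in grp})]
print(pk_list)  # [1] - only object 1 carries every required permission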
|
jam-py/jam-py | 0821492cdff8665928e0f093a4435aa64285a45c | jam/third_party/sqlalchemy/sql/operators.py | python | Operators.op | (
self, opstring, precedence=0, is_comparison=False, return_type=None
) | return against | produce a generic operator function.
e.g.::
somecolumn.op("*")(5)
produces::
somecolumn * 5
This function can also be used to make bitwise operators explicit. For
example::
somecolumn.op('&')(0xff)
is a bitwise AND of the value in ``somecolumn``.
:param operator: a string which will be output as the infix operator
between this element and the expression passed to the
generated function.
:param precedence: precedence to apply to the operator, when
parenthesizing expressions. A lower number will cause the expression
to be parenthesized when applied against another operator with
higher precedence. The default value of ``0`` is lower than all
operators except for the comma (``,``) and ``AS`` operators.
A value of 100 will be higher or equal to all operators, and -100
will be lower than or equal to all operators.
:param is_comparison: if True, the operator will be considered as a
"comparison" operator, that is which evaluates to a boolean
true/false value, like ``==``, ``>``, etc. This flag should be set
so that ORM relationships can establish that the operator is a
comparison operator when used in a custom join condition.
.. versionadded:: 0.9.2 - added the
:paramref:`.Operators.op.is_comparison` flag.
:param return_type: a :class:`.TypeEngine` class or object that will
force the return type of an expression produced by this operator
to be of that type. By default, operators that specify
:paramref:`.Operators.op.is_comparison` will resolve to
:class:`.Boolean`, and those that do not will be of the same
type as the left-hand operand.
.. versionadded:: 1.2.0b3 - added the
:paramref:`.Operators.op.return_type` argument.
.. seealso::
:ref:`types_operators`
:ref:`relationship_custom_operator` | produce a generic operator function. | [
"produce",
"a",
"generic",
"operator",
"function",
"."
] | def op(
self, opstring, precedence=0, is_comparison=False, return_type=None
):
"""produce a generic operator function.
e.g.::
somecolumn.op("*")(5)
produces::
somecolumn * 5
This function can also be used to make bitwise operators explicit. For
example::
somecolumn.op('&')(0xff)
is a bitwise AND of the value in ``somecolumn``.
:param operator: a string which will be output as the infix operator
between this element and the expression passed to the
generated function.
:param precedence: precedence to apply to the operator, when
parenthesizing expressions. A lower number will cause the expression
to be parenthesized when applied against another operator with
higher precedence. The default value of ``0`` is lower than all
operators except for the comma (``,``) and ``AS`` operators.
A value of 100 will be higher or equal to all operators, and -100
will be lower than or equal to all operators.
:param is_comparison: if True, the operator will be considered as a
"comparison" operator, that is which evaluates to a boolean
true/false value, like ``==``, ``>``, etc. This flag should be set
so that ORM relationships can establish that the operator is a
comparison operator when used in a custom join condition.
.. versionadded:: 0.9.2 - added the
:paramref:`.Operators.op.is_comparison` flag.
:param return_type: a :class:`.TypeEngine` class or object that will
force the return type of an expression produced by this operator
to be of that type. By default, operators that specify
:paramref:`.Operators.op.is_comparison` will resolve to
:class:`.Boolean`, and those that do not will be of the same
type as the left-hand operand.
.. versionadded:: 1.2.0b3 - added the
:paramref:`.Operators.op.return_type` argument.
.. seealso::
:ref:`types_operators`
:ref:`relationship_custom_operator`
"""
operator = custom_op(opstring, precedence, is_comparison, return_type)
def against(other):
return operator(self, other)
return against | [
"def",
"op",
"(",
"self",
",",
"opstring",
",",
"precedence",
"=",
"0",
",",
"is_comparison",
"=",
"False",
",",
"return_type",
"=",
"None",
")",
":",
"operator",
"=",
"custom_op",
"(",
"opstring",
",",
"precedence",
",",
"is_comparison",
",",
"return_type",
")",
"def",
"against",
"(",
"other",
")",
":",
"return",
"operator",
"(",
"self",
",",
"other",
")",
"return",
"against"
] | https://github.com/jam-py/jam-py/blob/0821492cdff8665928e0f093a4435aa64285a45c/jam/third_party/sqlalchemy/sql/operators.py#L124-L187 |
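Two small usage examples against SQLAlchemy itself (the compiled SQL shown in comments is approximate; bind-parameter names vary by context):

from sqlalchemy import column

# Bitwise AND, exactly as the docstring above describes.
expr = column("flags").op("&")(0xFF)
print(expr)  # flags & :flags_1

# A custom comparison operator, e.g. for a join condition.
match = column("a").op("~~", is_comparison=True)(column("b"))
print(match)  # a ~~ b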
|
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | deps/v8/tools/isolate_driver.py | python | prepare_isolate_call | (args, output) | Gathers all information required to run isolate.py later.
Dumps it as JSON to |output| file. | Gathers all information required to run isolate.py later. | [
"Gathers",
"all",
"information",
"required",
"to",
"run",
"isolate",
".",
"py",
"later",
"."
] | def prepare_isolate_call(args, output):
"""Gathers all information required to run isolate.py later.
Dumps it as JSON to |output| file.
"""
with open(output, 'wb') as f:
json.dump({
'args': args,
'dir': os.getcwd(),
'version': 1,
}, f, indent=2, sort_keys=True) | [
"def",
"prepare_isolate_call",
"(",
"args",
",",
"output",
")",
":",
"with",
"open",
"(",
"output",
",",
"'wb'",
")",
"as",
"f",
":",
"json",
".",
"dump",
"(",
"{",
"'args'",
":",
"args",
",",
"'dir'",
":",
"os",
".",
"getcwd",
"(",
")",
",",
"'version'",
":",
"1",
",",
"}",
",",
"f",
",",
"indent",
"=",
"2",
",",
"sort_keys",
"=",
"True",
")"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/v8/tools/isolate_driver.py#L276-L286 |
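The same logic stand-alone under Python 3 (the record opens the file in 'wb', a Python 2 idiom; text mode is used here), with made-up args:

import json
import os
import tempfile

with tempfile.NamedTemporaryFile("w", suffix=".gen.json", delete=False) as f:
    # Same payload shape as prepare_isolate_call() above.
    json.dump({"args": ["--foo", "bar"], "dir": os.getcwd(), "version": 1},
              f, indent=2, sort_keys=True)
    name = f.name

print(open(name).read())  # keys come out sorted: args, dir, version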
||
prometheus-ar/vot.ar | 72d8fa1ea08fe417b64340b98dff68df8364afdf | msa/modulos/ingreso_datos/controlador.py | python | Controlador.msg_mesaypin_incorrecto | (self) | | | Displays the message for an incorrect table and PIN. | Displays the message for an incorrect table and PIN. | [
"Muestra",
"el",
"mensaje",
"de",
"mesa",
"y",
"pin",
"incorrecto",
"."
] | def msg_mesaypin_incorrecto(self):
"""Muestra el mensaje de mesa y pin incorrecto."""
self.cargar_dialogo(callback_template="msg_mesa_y_pin_incorrectos",
aceptar=self.hide_dialogo) | [
"def",
"msg_mesaypin_incorrecto",
"(",
"self",
")",
":",
"self",
".",
"cargar_dialogo",
"(",
"callback_template",
"=",
"\"msg_mesa_y_pin_incorrectos\"",
",",
"aceptar",
"=",
"self",
".",
"hide_dialogo",
")"
] | https://github.com/prometheus-ar/vot.ar/blob/72d8fa1ea08fe417b64340b98dff68df8364afdf/msa/modulos/ingreso_datos/controlador.py#L223-L226 |
||
arschles/go-in-5-minutes | c02918d1def999b2d59c060818e8adb735e24719 | episode24/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | python | GetMacBundleResources | (product_dir, xcode_settings, resources) | Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory. | Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets. | [
"Yields",
"(",
"output",
"resource",
")",
"pairs",
"for",
"every",
"resource",
"in",
"|resources|",
".",
"Only",
"call",
"this",
"for",
"mac",
"bundle",
"targets",
"."
] | def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory.
"""
dest = os.path.join(product_dir,
xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
# The make generator doesn't support it, so forbid it everywhere
# to keep the generators more interchangeable.
assert ' ' not in res, (
"Spaces in resource filenames not supported (%s)" % res)
# Split into (path,file).
res_parts = os.path.split(res)
# Now split the path into (prefix,maybe.lproj).
lproj_parts = os.path.split(res_parts[0])
# If the resource lives in a .lproj bundle, add that to the destination.
if lproj_parts[1].endswith('.lproj'):
output = os.path.join(output, lproj_parts[1])
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = os.path.splitext(output)[0] + '.nib'
# Compiled storyboard files are referred to by .storyboardc.
if output.endswith('.storyboard'):
output = os.path.splitext(output)[0] + '.storyboardc'
yield output, res | [
"def",
"GetMacBundleResources",
"(",
"product_dir",
",",
"xcode_settings",
",",
"resources",
")",
":",
"dest",
"=",
"os",
".",
"path",
".",
"join",
"(",
"product_dir",
",",
"xcode_settings",
".",
"GetBundleResourceFolder",
"(",
")",
")",
"for",
"res",
"in",
"resources",
":",
"output",
"=",
"dest",
"# The make generator doesn't support it, so forbid it everywhere",
"# to keep the generators more interchangable.",
"assert",
"' '",
"not",
"in",
"res",
",",
"(",
"\"Spaces in resource filenames not supported (%s)\"",
"%",
"res",
")",
"# Split into (path,file).",
"res_parts",
"=",
"os",
".",
"path",
".",
"split",
"(",
"res",
")",
"# Now split the path into (prefix,maybe.lproj).",
"lproj_parts",
"=",
"os",
".",
"path",
".",
"split",
"(",
"res_parts",
"[",
"0",
"]",
")",
"# If the resource lives in a .lproj bundle, add that to the destination.",
"if",
"lproj_parts",
"[",
"1",
"]",
".",
"endswith",
"(",
"'.lproj'",
")",
":",
"output",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output",
",",
"lproj_parts",
"[",
"1",
"]",
")",
"output",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output",
",",
"res_parts",
"[",
"1",
"]",
")",
"# Compiled XIB files are referred to by .nib.",
"if",
"output",
".",
"endswith",
"(",
"'.xib'",
")",
":",
"output",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"output",
")",
"[",
"0",
"]",
"+",
"'.nib'",
"# Compiled storyboard files are referred to by .storyboardc.",
"if",
"output",
".",
"endswith",
"(",
"'.storyboard'",
")",
":",
"output",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"output",
")",
"[",
"0",
"]",
"+",
"'.storyboardc'",
"yield",
"output",
",",
"res"
] | https://github.com/arschles/go-in-5-minutes/blob/c02918d1def999b2d59c060818e8adb735e24719/episode24/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L1345-L1382 |
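The per-resource path mapping above, condensed into one standalone helper (the destination and resource names are invented):

import os

def bundle_resource_output(dest, res):
    # Condensed form of the mapping above: honour .lproj directories and
    # the compiled-name rewrites for .xib and .storyboard files.
    output = dest
    head, name = os.path.split(res)
    if os.path.split(head)[1].endswith(".lproj"):
        output = os.path.join(output, os.path.split(head)[1])
    output = os.path.join(output, name)
    for src, compiled in ((".xib", ".nib"), (".storyboard", ".storyboardc")):
        if output.endswith(src):
            output = os.path.splitext(output)[0] + compiled
    return output

print(bundle_resource_output("App.app/Resources", "en.lproj/Main.xib"))
# App.app/Resources/en.lproj/Main.nib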
||
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | deps/v8/third_party/jinja2/utils.py | python | LRUCache.__delitem__ | (self, key) | Remove an item from the cache dict.
Raise a `KeyError` if it does not exist. | Remove an item from the cache dict.
Raise a `KeyError` if it does not exist. | [
"Remove",
"an",
"item",
"from",
"the",
"cache",
"dict",
".",
"Raise",
"a",
"KeyError",
"if",
"it",
"does",
"not",
"exist",
"."
] | def __delitem__(self, key):
"""Remove an item from the cache dict.
Raise a `KeyError` if it does not exist.
"""
self._wlock.acquire()
try:
del self._mapping[key]
try:
self._remove(key)
except ValueError:
# __getitem__ is not locked, it might happen
pass
finally:
self._wlock.release() | [
"def",
"__delitem__",
"(",
"self",
",",
"key",
")",
":",
"self",
".",
"_wlock",
".",
"acquire",
"(",
")",
"try",
":",
"del",
"self",
".",
"_mapping",
"[",
"key",
"]",
"try",
":",
"self",
".",
"_remove",
"(",
"key",
")",
"except",
"ValueError",
":",
"# __getitem__ is not locked, it might happen",
"pass",
"finally",
":",
"self",
".",
"_wlock",
".",
"release",
"(",
")"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/v8/third_party/jinja2/utils.py#L428-L441 |
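The promised semantics, demonstrated on an OrderedDict as a stand-in for the cache's internal mapping: deletion must drop the key, and a second delete raises KeyError (the ValueError swallowed above only covers a race on the recency list):

from collections import OrderedDict

cache = OrderedDict([("a", 1), ("b", 2)])
del cache["a"]
print(list(cache))  # ['b']

try:
    del cache["a"]
except KeyError:
    print("KeyError, as the docstring promises")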
||
nodejs/node | ac3c33c1646bf46104c15ae035982c06364da9b8 | tools/gyp/pylib/gyp/generator/analyzer.py | python | _GenerateTargets | (data, target_list, target_dicts, toplevel_dir, files, build_files) | return name_to_target, matching_targets, roots & build_file_targets | Returns a tuple of the following:
. A dictionary mapping from fully qualified name to Target.
. A list of the targets that have a source file in |files|.
. Targets that constitute the 'all' target. See description at top of file
for details on the 'all' target.
This sets the |match_status| of the targets that contain any of the source
files in |files| to MATCH_STATUS_MATCHES.
|toplevel_dir| is the root of the source tree. | Returns a tuple of the following:
. A dictionary mapping from fully qualified name to Target.
. A list of the targets that have a source file in |files|.
. Targets that constitute the 'all' target. See description at top of file
for details on the 'all' target.
This sets the |match_status| of the targets that contain any of the source
files in |files| to MATCH_STATUS_MATCHES.
|toplevel_dir| is the root of the source tree. | [
"Returns",
"a",
"tuple",
"of",
"the",
"following",
":",
".",
"A",
"dictionary",
"mapping",
"from",
"fully",
"qualified",
"name",
"to",
"Target",
".",
".",
"A",
"list",
"of",
"the",
"targets",
"that",
"have",
"a",
"source",
"file",
"in",
"|files|",
".",
".",
"Targets",
"that",
"constitute",
"the",
"all",
"target",
".",
"See",
"description",
"at",
"top",
"of",
"file",
"for",
"details",
"on",
"the",
"all",
"target",
".",
"This",
"sets",
"the",
"|match_status|",
"of",
"the",
"targets",
"that",
"contain",
"any",
"of",
"the",
"source",
"files",
"in",
"|files|",
"to",
"MATCH_STATUS_MATCHES",
".",
"|toplevel_dir|",
"is",
"the",
"root",
"of",
"the",
"source",
"tree",
"."
] | def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
"""Returns a tuple of the following:
. A dictionary mapping from fully qualified name to Target.
. A list of the targets that have a source file in |files|.
. Targets that constitute the 'all' target. See description at top of file
for details on the 'all' target.
This sets the |match_status| of the targets that contain any of the source
files in |files| to MATCH_STATUS_MATCHES.
|toplevel_dir| is the root of the source tree."""
# Maps from target name to Target.
name_to_target = {}
# Targets that matched.
matching_targets = []
# Queue of targets to visit.
targets_to_visit = target_list[:]
# Maps from build file to a boolean indicating whether the build file is in
# |files|.
build_file_in_files = {}
# Root targets across all files.
roots = set()
# Set of Targets in |build_files|.
build_file_targets = set()
while len(targets_to_visit) > 0:
target_name = targets_to_visit.pop()
created_target, target = _GetOrCreateTargetByName(name_to_target, target_name)
if created_target:
roots.add(target)
elif target.visited:
continue
target.visited = True
target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name])
target_type = target_dicts[target_name]["type"]
target.is_executable = target_type == "executable"
target.is_static_library = target_type == "static_library"
target.is_or_has_linked_ancestor = (
target_type == "executable" or target_type == "shared_library"
)
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
if build_file not in build_file_in_files:
build_file_in_files[build_file] = _WasBuildFileModified(
build_file, data, files, toplevel_dir
)
if build_file in build_files:
build_file_targets.add(target)
# If a build file (or any of its included files) is modified we assume all
# targets in the file are modified.
if build_file_in_files[build_file]:
print("matching target from modified build file", target_name)
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
else:
sources = _ExtractSources(
target_name, target_dicts[target_name], toplevel_dir
)
for source in sources:
if _ToGypPath(os.path.normpath(source)) in files:
print("target", target_name, "matches", source)
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
break
# Add dependencies to visit as well as updating back pointers for deps.
for dep in target_dicts[target_name].get("dependencies", []):
targets_to_visit.append(dep)
created_dep_target, dep_target = _GetOrCreateTargetByName(
name_to_target, dep
)
if not created_dep_target:
roots.discard(dep_target)
target.deps.add(dep_target)
dep_target.back_deps.add(target)
return name_to_target, matching_targets, roots & build_file_targets | [
"def",
"_GenerateTargets",
"(",
"data",
",",
"target_list",
",",
"target_dicts",
",",
"toplevel_dir",
",",
"files",
",",
"build_files",
")",
":",
"# Maps from target name to Target.",
"name_to_target",
"=",
"{",
"}",
"# Targets that matched.",
"matching_targets",
"=",
"[",
"]",
"# Queue of targets to visit.",
"targets_to_visit",
"=",
"target_list",
"[",
":",
"]",
"# Maps from build file to a boolean indicating whether the build file is in",
"# |files|.",
"build_file_in_files",
"=",
"{",
"}",
"# Root targets across all files.",
"roots",
"=",
"set",
"(",
")",
"# Set of Targets in |build_files|.",
"build_file_targets",
"=",
"set",
"(",
")",
"while",
"len",
"(",
"targets_to_visit",
")",
">",
"0",
":",
"target_name",
"=",
"targets_to_visit",
".",
"pop",
"(",
")",
"created_target",
",",
"target",
"=",
"_GetOrCreateTargetByName",
"(",
"name_to_target",
",",
"target_name",
")",
"if",
"created_target",
":",
"roots",
".",
"add",
"(",
"target",
")",
"elif",
"target",
".",
"visited",
":",
"continue",
"target",
".",
"visited",
"=",
"True",
"target",
".",
"requires_build",
"=",
"_DoesTargetTypeRequireBuild",
"(",
"target_dicts",
"[",
"target_name",
"]",
")",
"target_type",
"=",
"target_dicts",
"[",
"target_name",
"]",
"[",
"\"type\"",
"]",
"target",
".",
"is_executable",
"=",
"target_type",
"==",
"\"executable\"",
"target",
".",
"is_static_library",
"=",
"target_type",
"==",
"\"static_library\"",
"target",
".",
"is_or_has_linked_ancestor",
"=",
"(",
"target_type",
"==",
"\"executable\"",
"or",
"target_type",
"==",
"\"shared_library\"",
")",
"build_file",
"=",
"gyp",
".",
"common",
".",
"ParseQualifiedTarget",
"(",
"target_name",
")",
"[",
"0",
"]",
"if",
"build_file",
"not",
"in",
"build_file_in_files",
":",
"build_file_in_files",
"[",
"build_file",
"]",
"=",
"_WasBuildFileModified",
"(",
"build_file",
",",
"data",
",",
"files",
",",
"toplevel_dir",
")",
"if",
"build_file",
"in",
"build_files",
":",
"build_file_targets",
".",
"add",
"(",
"target",
")",
"# If a build file (or any of its included files) is modified we assume all",
"# targets in the file are modified.",
"if",
"build_file_in_files",
"[",
"build_file",
"]",
":",
"print",
"(",
"\"matching target from modified build file\"",
",",
"target_name",
")",
"target",
".",
"match_status",
"=",
"MATCH_STATUS_MATCHES",
"matching_targets",
".",
"append",
"(",
"target",
")",
"else",
":",
"sources",
"=",
"_ExtractSources",
"(",
"target_name",
",",
"target_dicts",
"[",
"target_name",
"]",
",",
"toplevel_dir",
")",
"for",
"source",
"in",
"sources",
":",
"if",
"_ToGypPath",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"source",
")",
")",
"in",
"files",
":",
"print",
"(",
"\"target\"",
",",
"target_name",
",",
"\"matches\"",
",",
"source",
")",
"target",
".",
"match_status",
"=",
"MATCH_STATUS_MATCHES",
"matching_targets",
".",
"append",
"(",
"target",
")",
"break",
"# Add dependencies to visit as well as updating back pointers for deps.",
"for",
"dep",
"in",
"target_dicts",
"[",
"target_name",
"]",
".",
"get",
"(",
"\"dependencies\"",
",",
"[",
"]",
")",
":",
"targets_to_visit",
".",
"append",
"(",
"dep",
")",
"created_dep_target",
",",
"dep_target",
"=",
"_GetOrCreateTargetByName",
"(",
"name_to_target",
",",
"dep",
")",
"if",
"not",
"created_dep_target",
":",
"roots",
".",
"discard",
"(",
"dep_target",
")",
"target",
".",
"deps",
".",
"add",
"(",
"dep_target",
")",
"dep_target",
".",
"back_deps",
".",
"add",
"(",
"target",
")",
"return",
"name_to_target",
",",
"matching_targets",
",",
"roots",
"&",
"build_file_targets"
] | https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/tools/gyp/pylib/gyp/generator/analyzer.py#L340-L424 |
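Stripped of the gyp specifics, the traversal above is a plain worklist walk that also wires reverse-dependency pointers; a toy dependency graph makes the shape clear:

# Each target is visited once; back_deps records who depends on whom.
deps = {"all": ["app", "lib"], "app": ["lib"], "lib": []}

visited, back_deps = set(), {name: set() for name in deps}
to_visit = ["all"]
while to_visit:
    name = to_visit.pop()
    if name in visited:
        continue
    visited.add(name)
    for dep in deps[name]:
        to_visit.append(dep)
        back_deps[dep].add(name)

print(sorted(visited))           # ['all', 'app', 'lib']
print(sorted(back_deps["lib"]))  # ['all', 'app']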
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/reloop-closured/lib/python2.7/decimal.py | python | Context.Etop | (self) | return int(self.Emax - self.prec + 1) | Returns maximum exponent (= Emax - prec + 1) | Returns maximum exponent (= Emax - prec + 1) | [
"Returns",
"maximum",
"exponent",
"(",
"=",
"Emax",
"-",
"prec",
"+",
"1",
")"
] | def Etop(self):
"""Returns maximum exponent (= Emax - prec + 1)"""
return int(self.Emax - self.prec + 1) | [
"def",
"Etop",
"(",
"self",
")",
":",
"return",
"int",
"(",
"self",
".",
"Emax",
"-",
"self",
".",
"prec",
"+",
"1",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/reloop-closured/lib/python2.7/decimal.py#L3871-L3873 |
|
dataarts/webgl-globe | 8d746a3dbf95e57ec3c6c2c6effe920c95135253 | globe-vertex-texture/models/convert_obj_three.py | python | normalize | (v) | Normalize 3d vector | Normalize 3d vector | [
"Normalize",
"3d",
"vector"
] | def normalize(v):
"""Normalize 3d vector"""
l = math.sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2])
if l:
v[0] /= l
v[1] /= l
v[2] /= l | [
"def",
"normalize",
"(",
"v",
")",
":",
"l",
"=",
"math",
".",
"sqrt",
"(",
"v",
"[",
"0",
"]",
"*",
"v",
"[",
"0",
"]",
"+",
"v",
"[",
"1",
"]",
"*",
"v",
"[",
"1",
"]",
"+",
"v",
"[",
"2",
"]",
"*",
"v",
"[",
"2",
"]",
")",
"if",
"l",
":",
"v",
"[",
"0",
"]",
"/=",
"l",
"v",
"[",
"1",
"]",
"/=",
"l",
"v",
"[",
"2",
"]",
"/=",
"l"
] | https://github.com/dataarts/webgl-globe/blob/8d746a3dbf95e57ec3c6c2c6effe920c95135253/globe-vertex-texture/models/convert_obj_three.py#L354-L361 |
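The function mutates its argument in place and silently leaves the zero vector untouched; it is restated here so the snippet runs on its own:

import math

def normalize(v):
    # In-place normalisation; a zero-length vector is left as-is.
    l = math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2])
    if l:
        v[0] /= l
        v[1] /= l
        v[2] /= l

v = [3.0, 0.0, 4.0]
normalize(v)
print(v)  # [0.6, 0.0, 0.8]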
||
mlavin/django-selectable | f07a0fe387c448031b1357635e6b23cbb004960e | selectable/base.py | python | LookupBase.paginate_results | (self, results, options) | return results | Return a django.core.paginator.Page of results. | Return a django.core.paginator.Page of results. | [
"Return",
"a",
"django",
".",
"core",
".",
"paginator",
".",
"Page",
"of",
"results",
"."
] | def paginate_results(self, results, options):
"Return a django.core.paginator.Page of results."
limit = options.get('limit', settings.SELECTABLE_MAX_LIMIT)
paginator = Paginator(results, limit)
page = options.get('page', 1)
try:
results = paginator.page(page)
except (EmptyPage, InvalidPage):
results = paginator.page(paginator.num_pages)
return results | [
"def",
"paginate_results",
"(",
"self",
",",
"results",
",",
"options",
")",
":",
"limit",
"=",
"options",
".",
"get",
"(",
"'limit'",
",",
"settings",
".",
"SELECTABLE_MAX_LIMIT",
")",
"paginator",
"=",
"Paginator",
"(",
"results",
",",
"limit",
")",
"page",
"=",
"options",
".",
"get",
"(",
"'page'",
",",
"1",
")",
"try",
":",
"results",
"=",
"paginator",
".",
"page",
"(",
"page",
")",
"except",
"(",
"EmptyPage",
",",
"InvalidPage",
")",
":",
"results",
"=",
"paginator",
".",
"page",
"(",
"paginator",
".",
"num_pages",
")",
"return",
"results"
] | https://github.com/mlavin/django-selectable/blob/f07a0fe387c448031b1357635e6b23cbb004960e/selectable/base.py#L77-L86 |
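A pure-Python mimic of the clamp-to-last-page fallback (the real code defers to django.core.paginator; this only illustrates the behaviour):

import math

def paginate(results, limit, page):
    # Out-of-range page numbers fall back to the last page, as above.
    num_pages = max(1, math.ceil(len(results) / limit))
    page = num_pages if not (1 <= page <= num_pages) else page
    return results[(page - 1) * limit : page * limit]

print(paginate(list(range(10)), limit=3, page=99))  # [9] - the last page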
|
jam-py/jam-py | 0821492cdff8665928e0f093a4435aa64285a45c | jam/third_party/sqlalchemy/sql/base.py | python | _expand_cloned | (elements) | return itertools.chain(*[x._cloned_set for x in elements]) | expand the given set of ClauseElements to be the set of all 'cloned'
predecessors. | expand the given set of ClauseElements to be the set of all 'cloned'
predecessors. | [
"expand",
"the",
"given",
"set",
"of",
"ClauseElements",
"to",
"be",
"the",
"set",
"of",
"all",
"cloned",
"predecessors",
"."
] | def _expand_cloned(elements):
"""expand the given set of ClauseElements to be the set of all 'cloned'
predecessors.
"""
return itertools.chain(*[x._cloned_set for x in elements]) | [
"def",
"_expand_cloned",
"(",
"elements",
")",
":",
"return",
"itertools",
".",
"chain",
"(",
"*",
"[",
"x",
".",
"_cloned_set",
"for",
"x",
"in",
"elements",
"]",
")"
] | https://github.com/jam-py/jam-py/blob/0821492cdff8665928e0f093a4435aa64285a45c/jam/third_party/sqlalchemy/sql/base.py#L91-L96 |
|
korolr/dotfiles | 8e46933503ecb8d8651739ffeb1d2d4f0f5c6524 | .config/sublime-text-3/Backup/20181226200445/backrefs/st3/backrefs/bre.py | python | finditer | (pattern, string, flags=0) | return re.finditer(_apply_search_backrefs(pattern, flags), string, flags) | Finditer after applying backrefs. | Finditer after applying backrefs. | [
"Finditer",
"after",
"applying",
"backrefs",
"."
] | def finditer(pattern, string, flags=0):
"""Finditer after applying backrefs."""
return re.finditer(_apply_search_backrefs(pattern, flags), string, flags) | [
"def",
"finditer",
"(",
"pattern",
",",
"string",
",",
"flags",
"=",
"0",
")",
":",
"return",
"re",
".",
"finditer",
"(",
"_apply_search_backrefs",
"(",
"pattern",
",",
"flags",
")",
",",
"string",
",",
"flags",
")"
] | https://github.com/korolr/dotfiles/blob/8e46933503ecb8d8651739ffeb1d2d4f0f5c6524/.config/sublime-text-3/Backup/20181226200445/backrefs/st3/backrefs/bre.py#L947-L950 |
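Since the wrapper only preprocesses back-reference syntax in the pattern, a plain pattern behaves exactly like the stdlib; shown here with `re` directly:

import re

for m in re.finditer(r"\d+", "a1 b22 c333"):
    print(m.group())  # 1, then 22, then 333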
|
klaasnicolaas/Smarthome-homeassistant-config | 610bd35f4e8cdb4a1f41165b0ccb9251c76f5644 | custom_components/hacs/config_flow.py | python | HacsOptionsFlowHandler.async_step_init | (self, user_input=None) | return await self.async_step_user() | Manage the options. | Manage the options. | [
"Manage",
"the",
"options",
"."
] | async def async_step_init(self, user_input=None):
"""Manage the options."""
return await self.async_step_user() | [
"async",
"def",
"async_step_init",
"(",
"self",
",",
"user_input",
"=",
"None",
")",
":",
"return",
"await",
"self",
".",
"async_step_user",
"(",
")"
] | https://github.com/klaasnicolaas/Smarthome-homeassistant-config/blob/610bd35f4e8cdb4a1f41165b0ccb9251c76f5644/custom_components/hacs/config_flow.py#L80-L82 |
|
nodejs/quic | 5baab3f3a05548d3b51bea98868412b08766e34d | tools/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings._GetBundleBinaryPath | (self) | return os.path.join(self.GetBundleExecutableFolderPath(), \
self.GetExecutableName()) | Returns the name of the bundle binary output by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles. | Returns the name of the bundle binary output by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles. | [
"Returns",
"the",
"name",
"of",
"the",
"bundle",
"binary",
"of",
"by",
"this",
"target",
".",
"E",
".",
"g",
".",
"Chromium",
".",
"app",
"/",
"Contents",
"/",
"MacOS",
"/",
"Chromium",
".",
"Only",
"valid",
"for",
"bundles",
"."
] | def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(self.GetBundleExecutableFolderPath(), \
self.GetExecutableName()) | [
"def",
"_GetBundleBinaryPath",
"(",
"self",
")",
":",
"assert",
"self",
".",
"_IsBundle",
"(",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"GetBundleExecutableFolderPath",
"(",
")",
",",
"self",
".",
"GetExecutableName",
"(",
")",
")"
] | https://github.com/nodejs/quic/blob/5baab3f3a05548d3b51bea98868412b08766e34d/tools/gyp/pylib/gyp/xcode_emulation.py#L424-L429 |
|
alex-cory/fasthacks | 72b099f11df2e5640d61e55c80706c3b234eacbe | notes/JavaScript/nodejs/nodeJS_Lynda/command_line_tools/node_modules/connect-mongo/node_modules/mongodb/upload.py | python | GetSubversionPropertyChanges | (filename) | | return None | Return Subversion's 'Property changes on ...' string, which is used in
the patch file.
Args:
filename: filename whose property might be set by [auto-props] config.
Returns:
A string like 'Property changes on |filename| ...' if given |filename|
matches any entries in [auto-props] section. None, otherwise. | Return a Subversion's 'Property changes on ...' string, which is used in
the patch file. | [
"Return",
"a",
"Subversion",
"s",
"Property",
"changes",
"on",
"...",
"string",
"which",
"is",
"used",
"in",
"the",
"patch",
"file",
"."
] | def GetSubversionPropertyChanges(filename):
"""Return a Subversion's 'Property changes on ...' string, which is used in
the patch file.
Args:
filename: filename whose property might be set by [auto-props] config.
Returns:
A string like 'Property changes on |filename| ...' if given |filename|
matches any entries in [auto-props] section. None, otherwise.
"""
global svn_auto_props_map
if svn_auto_props_map is None:
svn_auto_props_map = LoadSubversionAutoProperties()
all_props = []
for file_pattern, props in svn_auto_props_map.items():
if fnmatch.fnmatch(filename, file_pattern):
all_props.extend(props)
if all_props:
return FormatSubversionPropertyChanges(filename, all_props)
return None | [
"def",
"GetSubversionPropertyChanges",
"(",
"filename",
")",
":",
"global",
"svn_auto_props_map",
"if",
"svn_auto_props_map",
"is",
"None",
":",
"svn_auto_props_map",
"=",
"LoadSubversionAutoProperties",
"(",
")",
"all_props",
"=",
"[",
"]",
"for",
"file_pattern",
",",
"props",
"in",
"svn_auto_props_map",
".",
"items",
"(",
")",
":",
"if",
"fnmatch",
".",
"fnmatch",
"(",
"filename",
",",
"file_pattern",
")",
":",
"all_props",
".",
"extend",
"(",
"props",
")",
"if",
"all_props",
":",
"return",
"FormatSubversionPropertyChanges",
"(",
"filename",
",",
"all_props",
")",
"return",
"None"
] | https://github.com/alex-cory/fasthacks/blob/72b099f11df2e5640d61e55c80706c3b234eacbe/notes/JavaScript/nodejs/nodeJS_Lynda/command_line_tools/node_modules/connect-mongo/node_modules/mongodb/upload.py#L2096-L2117 |
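The pattern-matching step in isolation, with an invented [auto-props] map:

import fnmatch

# Hypothetical auto-props configuration; real entries come from the svn config.
svn_auto_props_map = {"*.py": [("svn:eol-style", "native")],
                      "*.sh": [("svn:executable", "*")]}

def props_for(filename):
    # Collect properties from every pattern the filename satisfies.
    all_props = []
    for pattern, props in svn_auto_props_map.items():
        if fnmatch.fnmatch(filename, pattern):
            all_props.extend(props)
    return all_props

print(props_for("upload.py"))  # [('svn:eol-style', 'native')]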
|
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Form/ListBox.py | python | ListBoxRenderer.__call__ | (self, **kw) | | return self.render(**kw) | Render the ListBox. The real rendering must be done by the method "render" which should
be defined in subclasses. | Render the ListBox. The real rendering must be done by the method "render" which should
be defined in subclasses. | [
"Render",
"the",
"ListBox",
".",
"The",
"real",
"rendering",
"must",
"be",
"done",
"the",
"method",
"render",
"which",
"should",
"be",
"defined",
"in",
"subclasses",
"."
] | def __call__(self, **kw):
"""Render the ListBox. The real rendering must be done the method "render" which should
be defined in subclasses.
"""
return self.render(**kw) | [
"def",
"__call__",
"(",
"self",
",",
"*",
"*",
"kw",
")",
":",
"return",
"self",
".",
"render",
"(",
"*",
"*",
"kw",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Form/ListBox.py#L2048-L2052 |
|
nodejs/quic | 5baab3f3a05548d3b51bea98868412b08766e34d | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py | python | NinjaWriter.WriteMacBundleResources | (self, resources, bundle_depends) | return xcassets | Writes ninja edges for 'mac_bundle_resources'. | Writes ninja edges for 'mac_bundle_resources'. | [
"Writes",
"ninja",
"edges",
"for",
"mac_bundle_resources",
"."
] | def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
xcassets = []
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
if os.path.splitext(output)[-1] != '.xcassets':
isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource'), \
('binary', isBinary)])
bundle_depends.append(output)
else:
xcassets.append(res)
return xcassets | [
"def",
"WriteMacBundleResources",
"(",
"self",
",",
"resources",
",",
"bundle_depends",
")",
":",
"xcassets",
"=",
"[",
"]",
"for",
"output",
",",
"res",
"in",
"gyp",
".",
"xcode_emulation",
".",
"GetMacBundleResources",
"(",
"generator_default_variables",
"[",
"'PRODUCT_DIR'",
"]",
",",
"self",
".",
"xcode_settings",
",",
"map",
"(",
"self",
".",
"GypPathToNinja",
",",
"resources",
")",
")",
":",
"output",
"=",
"self",
".",
"ExpandSpecial",
"(",
"output",
")",
"if",
"os",
".",
"path",
".",
"splitext",
"(",
"output",
")",
"[",
"-",
"1",
"]",
"!=",
"'.xcassets'",
":",
"isBinary",
"=",
"self",
".",
"xcode_settings",
".",
"IsBinaryOutputFormat",
"(",
"self",
".",
"config_name",
")",
"self",
".",
"ninja",
".",
"build",
"(",
"output",
",",
"'mac_tool'",
",",
"res",
",",
"variables",
"=",
"[",
"(",
"'mactool_cmd'",
",",
"'copy-bundle-resource'",
")",
",",
"(",
"'binary'",
",",
"isBinary",
")",
"]",
")",
"bundle_depends",
".",
"append",
"(",
"output",
")",
"else",
":",
"xcassets",
".",
"append",
"(",
"res",
")",
"return",
"xcassets"
] | https://github.com/nodejs/quic/blob/5baab3f3a05548d3b51bea98868412b08766e34d/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py#L769-L784 |
|
JoneXiong/DjangoX | c2a723e209ef13595f571923faac7eb29e4c8150 | xadmin/management/commands/syncperm.py | python | get_or_create_perm | (name, codename, content_type) | | return Permission.objects.get_or_create(name=name, codename=codename, content_type=content_type) | Create the permission if it does not exist.
:param name:
:param codename:
:param content_type:
:return: | Create the permission if it does not exist.
:param name:
:param codename:
:param content_type:
:return: | [
"创建不存在的权限",
":",
"param",
"name",
":",
":",
"param",
"codename",
":",
":",
"param",
"content_type",
":",
":",
"return",
":"
] | def get_or_create_perm(name, codename, content_type):
'''
Create the permission if it does not exist.
:param name:
:param codename:
:param content_type:
:return:
'''
return Permission.objects.get_or_create(name=name, codename=codename, content_type=content_type) | [
"def",
"get_or_create_perm",
"(",
"name",
",",
"codename",
",",
"content_type",
")",
":",
"return",
"Permission",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"name",
",",
"codename",
"=",
"codename",
",",
"content_type",
"=",
"content_type",
")"
] | https://github.com/JoneXiong/DjangoX/blob/c2a723e209ef13595f571923faac7eb29e4c8150/xadmin/management/commands/syncperm.py#L37-L45 |
|
fullscale/pypes | 2171d0141e184999ba03c3e535ecc9bfddef10be | ui/pypesvds/lib/extras/BeautifulSoup.py | python | PageElement.findPreviousSiblings | (self, name=None, attrs={}, text=None,
limit=None, **kwargs) | return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs) | Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document. | Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document. | [
"Returns",
"the",
"siblings",
"of",
"this",
"Tag",
"that",
"match",
"the",
"given",
"criteria",
"and",
"appear",
"before",
"this",
"Tag",
"in",
"the",
"document",
"."
] | def findPreviousSiblings(self, name=None, attrs={}, text=None,
limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs) | [
"def",
"findPreviousSiblings",
"(",
"self",
",",
"name",
"=",
"None",
",",
"attrs",
"=",
"{",
"}",
",",
"text",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"_findAll",
"(",
"name",
",",
"attrs",
",",
"text",
",",
"limit",
",",
"self",
".",
"previousSiblingGenerator",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/fullscale/pypes/blob/2171d0141e184999ba03c3e535ecc9bfddef10be/ui/pypesvds/lib/extras/BeautifulSoup.py#L284-L289 |
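Equivalent usage against bs4, which keeps the camelCase name as an alias of find_previous_siblings (the vendored copy above is BeautifulSoup 3):

from bs4 import BeautifulSoup  # assumes bs4 is installed

soup = BeautifulSoup("<p><b>one</b><b>two</b><i>x</i></p>", "html.parser")
# Siblings matching 'b' that precede <i>, nearest first.
print(soup.i.findPreviousSiblings("b"))  # [<b>two</b>, <b>one</b>]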
|
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | tools/gyp/pylib/gyp/generator/msvs.py | python | _GetMSBuildExternalBuilderTargets | (spec) | return targets | Return a list of MSBuild targets for external builders.
The "Build" and "Clean" targets are always generated. If the spec contains
'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
be generated, to support building selected C/C++ files.
Arguments:
spec: The gyp target spec.
Returns:
List of MSBuild 'Target' specs. | Return a list of MSBuild targets for external builders. | [
"Return",
"a",
"list",
"of",
"MSBuild",
"targets",
"for",
"external",
"builders",
"."
] | def _GetMSBuildExternalBuilderTargets(spec):
"""Return a list of MSBuild targets for external builders.
The "Build" and "Clean" targets are always generated. If the spec contains
'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
be generated, to support building selected C/C++ files.
Arguments:
spec: The gyp target spec.
Returns:
List of MSBuild 'Target' specs.
"""
build_cmd = _BuildCommandLineForRuleRaw(
spec, spec['msvs_external_builder_build_cmd'],
False, False, False, False)
build_target = ['Target', {'Name': 'Build'}]
build_target.append(['Exec', {'Command': build_cmd}])
clean_cmd = _BuildCommandLineForRuleRaw(
spec, spec['msvs_external_builder_clean_cmd'],
False, False, False, False)
clean_target = ['Target', {'Name': 'Clean'}]
clean_target.append(['Exec', {'Command': clean_cmd}])
targets = [build_target, clean_target]
if spec.get('msvs_external_builder_clcompile_cmd'):
clcompile_cmd = _BuildCommandLineForRuleRaw(
spec, spec['msvs_external_builder_clcompile_cmd'],
False, False, False, False)
clcompile_target = ['Target', {'Name': 'ClCompile'}]
clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
targets.append(clcompile_target)
return targets | [
"def",
"_GetMSBuildExternalBuilderTargets",
"(",
"spec",
")",
":",
"build_cmd",
"=",
"_BuildCommandLineForRuleRaw",
"(",
"spec",
",",
"spec",
"[",
"'msvs_external_builder_build_cmd'",
"]",
",",
"False",
",",
"False",
",",
"False",
",",
"False",
")",
"build_target",
"=",
"[",
"'Target'",
",",
"{",
"'Name'",
":",
"'Build'",
"}",
"]",
"build_target",
".",
"append",
"(",
"[",
"'Exec'",
",",
"{",
"'Command'",
":",
"build_cmd",
"}",
"]",
")",
"clean_cmd",
"=",
"_BuildCommandLineForRuleRaw",
"(",
"spec",
",",
"spec",
"[",
"'msvs_external_builder_clean_cmd'",
"]",
",",
"False",
",",
"False",
",",
"False",
",",
"False",
")",
"clean_target",
"=",
"[",
"'Target'",
",",
"{",
"'Name'",
":",
"'Clean'",
"}",
"]",
"clean_target",
".",
"append",
"(",
"[",
"'Exec'",
",",
"{",
"'Command'",
":",
"clean_cmd",
"}",
"]",
")",
"targets",
"=",
"[",
"build_target",
",",
"clean_target",
"]",
"if",
"spec",
".",
"get",
"(",
"'msvs_external_builder_clcompile_cmd'",
")",
":",
"clcompile_cmd",
"=",
"_BuildCommandLineForRuleRaw",
"(",
"spec",
",",
"spec",
"[",
"'msvs_external_builder_clcompile_cmd'",
"]",
",",
"False",
",",
"False",
",",
"False",
",",
"False",
")",
"clcompile_target",
"=",
"[",
"'Target'",
",",
"{",
"'Name'",
":",
"'ClCompile'",
"}",
"]",
"clcompile_target",
".",
"append",
"(",
"[",
"'Exec'",
",",
"{",
"'Command'",
":",
"clcompile_cmd",
"}",
"]",
")",
"targets",
".",
"append",
"(",
"clcompile_target",
")",
"return",
"targets"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/tools/gyp/pylib/gyp/generator/msvs.py#L3425-L3459 |
|
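For orientation, gyp encodes MSBuild XML as nested Python lists of the form `[tag, attributes, *children]`, so the specs returned above are plain data. The command strings below are illustrative only, not real gyp output:

```python
# Illustrative shape of the returned 'Target' specs (commands are made up):
build_target = ['Target', {'Name': 'Build'},
                ['Exec', {'Command': 'make -C src all'}]]
clean_target = ['Target', {'Name': 'Clean'},
                ['Exec', {'Command': 'make -C src clean'}]]
targets = [build_target, clean_target]  # plus a 'ClCompile' target when configured
```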
RedHatDemos/SecurityDemos | 084933d871e8b9ae07e34b479e83830fa324e374 | 2020Labs/RHELSecurity/ansible/agnosticd-old/tools/archive/configs/RHCLS-ScalableInfra-demo/examples/startstop.py | python | appdir | () | return os.path.join(homedir(), '.startstop') | Return our application directory. | Return our application directory. | [
"Return",
"our",
"application",
"directory",
"."
] | def appdir():
"""Return our application directory."""
return os.path.join(homedir(), '.startstop') | [
"def",
"appdir",
"(",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"homedir",
"(",
")",
",",
"'.startstop'",
")"
] | https://github.com/RedHatDemos/SecurityDemos/blob/084933d871e8b9ae07e34b479e83830fa324e374/2020Labs/RHELSecurity/ansible/agnosticd-old/tools/archive/configs/RHCLS-ScalableInfra-demo/examples/startstop.py#L60-L62 |
|
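A self-contained sketch of the record above; `homedir` is not shown in that snippet, so an `os.path.expanduser`-based stand-in is assumed here.

```python
import os

def homedir():
    # Assumed stand-in for the helper the record relies on.
    return os.path.expanduser('~')

def appdir():
    """Return our application directory."""
    return os.path.join(homedir(), '.startstop')

print(appdir())  # e.g. /home/alice/.startstop
```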
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Type/Core/ActionInformation.py | python | ActionInformation.getActionText | (self) | return getattr(self.getAction(), 'text', None) | Return the text of the action expression | Return the text of the action expression | [
"Return",
"the",
"text",
"of",
"the",
"action",
"expression"
] | def getActionText(self):
"""Return the text of the action expression"""
return getattr(self.getAction(), 'text', None) | [
"def",
"getActionText",
"(",
"self",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"getAction",
"(",
")",
",",
"'text'",
",",
"None",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Type/Core/ActionInformation.py#L111-L113 |
|
windmill/windmill | 994bd992b17f3f2d6f6b276fe17391fea08f32c3 | windmill/bin/admin_options.py | python | process_module | (module) | Process this modules option list | Process this modules option list | [
"Process",
"this",
"modules",
"option",
"list"
] | def process_module(module):
"""Process this modules option list"""
options_dict = {}
flags_dict = {}
for klass in [getattr(module, cname) for cname in dir(module) if hasattr(getattr(module, cname), 'option_names')]:
if klass.option_names[0] is not None:
flags_dict[klass.option_names[0]] = klass()
options_dict[klass.option_names[1]] = klass()
module.options_dict = options_dict
module.flags_dict = flags_dict | [
"def",
"process_module",
"(",
"module",
")",
":",
"options_dict",
"=",
"{",
"}",
"flags_dict",
"=",
"{",
"}",
"for",
"klass",
"in",
"[",
"getattr",
"(",
"module",
",",
"cname",
")",
"for",
"cname",
"in",
"dir",
"(",
"module",
")",
"if",
"hasattr",
"(",
"getattr",
"(",
"module",
",",
"cname",
")",
",",
"'option_names'",
")",
"]",
":",
"if",
"klass",
".",
"option_names",
"[",
"0",
"]",
"is",
"not",
"None",
":",
"flags_dict",
"[",
"klass",
".",
"option_names",
"[",
"0",
"]",
"]",
"=",
"klass",
"(",
")",
"options_dict",
"[",
"klass",
".",
"option_names",
"[",
"1",
"]",
"]",
"=",
"klass",
"(",
")",
"module",
".",
"options_dict",
"=",
"options_dict",
"module",
".",
"flags_dict",
"=",
"flags_dict"
] | https://github.com/windmill/windmill/blob/994bd992b17f3f2d6f6b276fe17391fea08f32c3/windmill/bin/admin_options.py#L209-L220 |
||
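The contract `process_module` expects, sketched on a throwaway module: each option class carries `option_names = (short_flag_or_None, long_name)`, and one instance per class is indexed under both names. The class names here are hypothetical.

```python
import types

class StartOption(object):
    option_names = ('s', 'start')

class HelpOption(object):
    option_names = (None, 'help')  # long-form only, no short flag

demo = types.ModuleType('demo')
demo.StartOption, demo.HelpOption = StartOption, HelpOption

process_module(demo)              # the function from the record above
print(sorted(demo.flags_dict))    # ['s']
print(sorted(demo.options_dict))  # ['help', 'start']
```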
smartschat/cort | 2349f0308a4115acb89d442fe945533bdb3b70e2 | cort/analysis/spanning_tree_algorithms.py | python | recall_accessibility | (entity, partitioned_entity) | return sorted(edges) | Compute a spanning tree by choosing edges according to the accessibility
of the antecedent.
First, if a mention has an out-degree of at least one in the partitioned
entity, take the edge with the closest mention distance as an edge for
the spanning tree. Otherwise, proceed as follows.
If a mention m is a proper name or a common noun, choose an antecedent as
follows:
- if a proper name antecedent exists, take the closest and output this
pair as an edge
- else if a common noun antecedent exists, take the closest and output
this pair as an edge
- else take the closest preceding mention and output this pair as an
edge
For all other mentions, take the closest preceding mention and output
this pair as an edge.
Args:
entity (EntityGraph): The EntityGraph for the entity for which the
spanning tree should be computed.
partitioned_entity (EntityGraph): A partition of the entity -- not
used for this algorithm.
Returns:
list(Mention, Mention): A list of mention pairs, which constitute the
edges of the spanning tree. For a pair (m, n), n appears later in
the text than m. | Compute a spanning tree by choosing edges according to the accessibility
of the antecedent. | [
"Compute",
"a",
"spanning",
"tree",
"by",
"choosing",
"edges",
"according",
"to",
"the",
"accessibility",
"of",
"the",
"antecedent",
"."
] | def recall_accessibility(entity, partitioned_entity):
""" Compute a spanning tree by choosing edges according to the accessibility
of the antecedent.
First, if a mention has an out-degree of at least one in the partitioned
entity, take the edge with the closest mention distance as an edge for
the spanning tree. Otherwise, proceed as follows.
If a mention m is a proper name or a common noun, choose an antecedent as
follows:
- if a proper name antecedent exists, take the closest and output this
pair as an edge
- else if a common noun antecedent exists, take the closest and output
this pair as an edge
- else take the closest preceding mention and output this pair as an
edge
For all other mentions, take the closest preceding mention and output
this pair as an edge.
Args:
entity (EntityGraph): The EntityGraph for the entity for which the
spanning tree should be computed.
partitioned_entity (EntityGraph): A partition of the entity -- not
used for this algorithm.
Returns:
list(Mention, Mention): A list of mention pairs, which constitute the
edges of the spanning tree. For a pair (m, n), n appears later in
the text than m.
"""
edges = []
for mention in entity.edges:
if entity.edges[mention]:
# mention is not the first in subentity? take closest!
if mention in partitioned_entity.edges:
antecedent = sorted(partitioned_entity.edges[mention],
reverse=True)[0]
else:
antecedent = __get_antecedent_by_type(mention,
entity.edges[mention])
edges.append((mention, antecedent))
return sorted(edges) | [
"def",
"recall_accessibility",
"(",
"entity",
",",
"partitioned_entity",
")",
":",
"edges",
"=",
"[",
"]",
"for",
"mention",
"in",
"entity",
".",
"edges",
":",
"if",
"entity",
".",
"edges",
"[",
"mention",
"]",
":",
"# mention is not the first in subentity? take closest!",
"if",
"mention",
"in",
"partitioned_entity",
".",
"edges",
":",
"antecedent",
"=",
"sorted",
"(",
"partitioned_entity",
".",
"edges",
"[",
"mention",
"]",
",",
"reverse",
"=",
"True",
")",
"[",
"0",
"]",
"else",
":",
"antecedent",
"=",
"__get_antecedent_by_type",
"(",
"mention",
",",
"entity",
".",
"edges",
"[",
"mention",
"]",
")",
"edges",
".",
"append",
"(",
"(",
"mention",
",",
"antecedent",
")",
")",
"return",
"sorted",
"(",
"edges",
")"
] | https://github.com/smartschat/cort/blob/2349f0308a4115acb89d442fe945533bdb3b70e2/cort/analysis/spanning_tree_algorithms.py#L65-L110 |
|
facebookarchive/nuclide | 2a2a0a642d136768b7d2a6d35a652dc5fb77d70a | modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydev_ipython/matplotlibtools.py | python | patch_use | (enable_gui_function) | Patch matplotlib function 'use' | Patch matplotlib function 'use' | [
"Patch",
"matplotlib",
"function",
"use"
] | def patch_use(enable_gui_function):
""" Patch matplotlib function 'use' """
matplotlib = sys.modules['matplotlib']
def patched_use(*args, **kwargs):
matplotlib.real_use(*args, **kwargs)
gui, backend = find_gui_and_backend()
enable_gui_function(gui)
matplotlib.real_use = matplotlib.use
matplotlib.use = patched_use | [
"def",
"patch_use",
"(",
"enable_gui_function",
")",
":",
"matplotlib",
"=",
"sys",
".",
"modules",
"[",
"'matplotlib'",
"]",
"def",
"patched_use",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"matplotlib",
".",
"real_use",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"gui",
",",
"backend",
"=",
"find_gui_and_backend",
"(",
")",
"enable_gui_function",
"(",
"gui",
")",
"matplotlib",
".",
"real_use",
"=",
"matplotlib",
".",
"use",
"matplotlib",
".",
"use",
"=",
"patched_use"
] | https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydev_ipython/matplotlibtools.py#L66-L75 |
||
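The record's `find_gui_and_backend` helper is not shown above, so here is the same wrap-and-replace pattern in isolation:

```python
# Standalone sketch of the monkey-patch pattern used above:
import matplotlib

real_use = matplotlib.use  # keep a handle on the original

def patched_use(*args, **kwargs):
    real_use(*args, **kwargs)
    print("backend selected:", matplotlib.get_backend())

matplotlib.use = patched_use
matplotlib.use("Agg")  # prints the active backend name, e.g. "agg"
```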
JoneXiong/PyRedisAdmin | 130107d51ae4b84fbfdd89d7e748cedb721614dd | redis/client.py | python | StrictRedis.zrevrank | (self, name, value) | return self.execute_command('ZREVRANK', name, value) | Returns a 0-based value indicating the descending rank of
``value`` in sorted set ``name`` | Returns a 0-based value indicating the descending rank of
``value`` in sorted set ``name`` | [
"Returns",
"a",
"0",
"-",
"based",
"value",
"indicating",
"the",
"descending",
"rank",
"of",
"value",
"in",
"sorted",
"set",
"name"
] | def zrevrank(self, name, value):
"""
Returns a 0-based value indicating the descending rank of
``value`` in sorted set ``name``
"""
return self.execute_command('ZREVRANK', name, value) | [
"def",
"zrevrank",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"return",
"self",
".",
"execute_command",
"(",
"'ZREVRANK'",
",",
"name",
",",
"value",
")"
] | https://github.com/JoneXiong/PyRedisAdmin/blob/130107d51ae4b84fbfdd89d7e748cedb721614dd/redis/client.py#L1763-L1768 |
|
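Usage against a live server; this sketch assumes a local Redis instance and a redis-py 3.x-style client (the vendored client above predates 3.x, whose `zadd` takes a mapping as shown).

```python
import redis

r = redis.StrictRedis()
r.zadd('scores', {'alice': 10, 'bob': 20, 'carol': 30})
print(r.zrevrank('scores', 'carol'))  # 0 -- highest score ranks first
print(r.zrevrank('scores', 'alice'))  # 2
```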
Opentrons/opentrons | 466e0567065d8773a81c25cd1b5c7998e00adf2c | api/src/opentrons/protocol_engine/commands/aspirate.py | python | AspirateImplementation.execute | (self, params: AspirateParams) | return AspirateResult(volume=volume) | Move to and aspirate from the requested well. | Move to and aspirate from the requested well. | [
"Move",
"to",
"and",
"aspirate",
"from",
"the",
"requested",
"well",
"."
] | async def execute(self, params: AspirateParams) -> AspirateResult:
"""Move to and aspirate from the requested well."""
volume = await self._pipetting.aspirate(
pipette_id=params.pipetteId,
labware_id=params.labwareId,
well_name=params.wellName,
well_location=params.wellLocation,
volume=params.volume,
)
return AspirateResult(volume=volume) | [
"async",
"def",
"execute",
"(",
"self",
",",
"params",
":",
"AspirateParams",
")",
"->",
"AspirateResult",
":",
"volume",
"=",
"await",
"self",
".",
"_pipetting",
".",
"aspirate",
"(",
"pipette_id",
"=",
"params",
".",
"pipetteId",
",",
"labware_id",
"=",
"params",
".",
"labwareId",
",",
"well_name",
"=",
"params",
".",
"wellName",
",",
"well_location",
"=",
"params",
".",
"wellLocation",
",",
"volume",
"=",
"params",
".",
"volume",
",",
")",
"return",
"AspirateResult",
"(",
"volume",
"=",
"volume",
")"
] | https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/src/opentrons/protocol_engine/commands/aspirate.py#L28-L38 |
|
dternyak/React-Redux-Flask | c547ca132c4ac4269850f4c813e9d4274156921b | migrations/env.py | python | run_migrations_online | () | Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context. | Run migrations in 'online' mode. | [
"Run",
"migrations",
"in",
"online",
"mode",
"."
] | def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata
)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close() | [
"def",
"run_migrations_online",
"(",
")",
":",
"engine",
"=",
"engine_from_config",
"(",
"config",
".",
"get_section",
"(",
"config",
".",
"config_ini_section",
")",
",",
"prefix",
"=",
"'sqlalchemy.'",
",",
"poolclass",
"=",
"pool",
".",
"NullPool",
")",
"connection",
"=",
"engine",
".",
"connect",
"(",
")",
"context",
".",
"configure",
"(",
"connection",
"=",
"connection",
",",
"target_metadata",
"=",
"target_metadata",
")",
"try",
":",
"with",
"context",
".",
"begin_transaction",
"(",
")",
":",
"context",
".",
"run_migrations",
"(",
")",
"finally",
":",
"connection",
".",
"close",
"(",
")"
] | https://github.com/dternyak/React-Redux-Flask/blob/c547ca132c4ac4269850f4c813e9d4274156921b/migrations/env.py#L45-L67 |
||
silklabs/silk | 08c273949086350aeddd8e23e92f0f79243f446f | node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | python | CalculateGeneratorInputInfo | (params) | Calculate the generator specific info that gets fed to input (called by
gyp). | Calculate the generator specific info that gets fed to input (called by
gyp). | [
"Calculate",
"the",
"generator",
"specific",
"info",
"that",
"gets",
"fed",
"to",
"input",
"(",
"called",
"by",
"gyp",
")",
"."
] | def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get('generator_flags', {})
android_ndk_version = generator_flags.get('android_ndk_version', None)
# Android NDK requires a strict link order.
if android_ndk_version:
global generator_wants_sorted_dependencies
generator_wants_sorted_dependencies = True
output_dir = params['options'].generator_output or \
params['options'].toplevel_dir
builddir_name = generator_flags.get('output_dir', 'out')
qualified_out_dir = os.path.normpath(os.path.join(
output_dir, builddir_name, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': params['options'].toplevel_dir,
'qualified_out_dir': qualified_out_dir,
} | [
"def",
"CalculateGeneratorInputInfo",
"(",
"params",
")",
":",
"generator_flags",
"=",
"params",
".",
"get",
"(",
"'generator_flags'",
",",
"{",
"}",
")",
"android_ndk_version",
"=",
"generator_flags",
".",
"get",
"(",
"'android_ndk_version'",
",",
"None",
")",
"# Android NDK requires a strict link order.",
"if",
"android_ndk_version",
":",
"global",
"generator_wants_sorted_dependencies",
"generator_wants_sorted_dependencies",
"=",
"True",
"output_dir",
"=",
"params",
"[",
"'options'",
"]",
".",
"generator_output",
"or",
"params",
"[",
"'options'",
"]",
".",
"toplevel_dir",
"builddir_name",
"=",
"generator_flags",
".",
"get",
"(",
"'output_dir'",
",",
"'out'",
")",
"qualified_out_dir",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"builddir_name",
",",
"'gypfiles'",
")",
")",
"global",
"generator_filelist_paths",
"generator_filelist_paths",
"=",
"{",
"'toplevel'",
":",
"params",
"[",
"'options'",
"]",
".",
"toplevel_dir",
",",
"'qualified_out_dir'",
":",
"qualified_out_dir",
",",
"}"
] | https://github.com/silklabs/silk/blob/08c273949086350aeddd8e23e92f0f79243f446f/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py#L98-L118 |
||
cBioPortal/cbioportal | e7a279de809e2fd3af02afeea2317135ec81ffcf | core/src/main/scripts/importer/validateData.py | python | GenePanelMatrixValidator.__init__ | (self, *args, **kwargs) | Initialize a GenePanelMatrixValidator with the given parameters. | Initialize a GenePanelMatrixValidator with the given parameters. | [
"Initialize",
"a",
"GenePanelMatrixValidator",
"with",
"the",
"given",
"parameters",
"."
] | def __init__(self, *args, **kwargs):
"""Initialize a GenePanelMatrixValidator with the given parameters."""
super(GenePanelMatrixValidator, self).__init__(*args, **kwargs)
self.mutation_profile_column = None
self.gene_panel_sample_ids = {}
self.mutation_stable_id_index = None | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"GenePanelMatrixValidator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"mutation_profile_column",
"=",
"None",
"self",
".",
"gene_panel_sample_ids",
"=",
"{",
"}",
"self",
".",
"mutation_stable_id_index",
"=",
"None"
] | https://github.com/cBioPortal/cbioportal/blob/e7a279de809e2fd3af02afeea2317135ec81ffcf/core/src/main/scripts/importer/validateData.py#L3302-L3307 |
||
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | bt5/erp5_base/DocumentTemplateItem/portal_components/document.erp5.SimulatedDeliveryBuilder.py | python | SimulatedDeliveryBuilder._createDelivery | (self, delivery_module, movement_list, activate_kw) | return delivery | Refer to the docstring in GeneratedDeliveryBuilder.
Unlike GeneratedDeliveryBuilder, SimulatedDeliveryBuilder needs to respect
existing relationship. | Refer to the docstring in GeneratedDeliveryBuilder.
Unlike GeneratedDeliveryBuilder, SimulatedDeliveryBuilder needs to respect
existing relationship. | [
"Refer",
"to",
"the",
"docstring",
"in",
"GeneratedDeliveryBuilder",
".",
"Unlike",
"GeneratedDeliveryBuilder",
"SimulatedDeliveryBuilder",
"needs",
"to",
"respect",
"existing",
"relationship",
"."
] | def _createDelivery(self, delivery_module, movement_list, activate_kw): # pylint: disable=super-on-old-class
"""
Refer to the docstring in GeneratedDeliveryBuilder.
Unlike GeneratedDeliveryBuilder, SimulatedDeliveryBuilder needs to respect
existing relationship.
"""
try:
old_delivery = self._searchUpByPortalType(
movement_list[0].getDeliveryValue(),
self.getDeliveryPortalType())
except AttributeError:
old_delivery = None
if old_delivery is None:
# from scratch
delivery = super(SimulatedDeliveryBuilder, self)._createDelivery(
delivery_module, movement_list, activate_kw)
# Interactions will usually trigger reindexing of related SM when
# simulation state changes. Disable them for this transaction
# because we already do this in _setDeliveryMovementProperties
delivery.updateSimulation(index_related=0)
else:
# from duplicated original delivery
cp = tryMethodCallWithTemporaryPermission(
delivery_module, 'Copy or Move',
lambda parent, *ids:
parent._duplicate(parent.manage_copyObjects(ids=ids))[0],
(delivery_module, old_delivery.getId()), {}, CopyError)
delivery = delivery_module[cp['new_id']]
# delete non-split movements
keep_id_list = [y.getDeliveryValue().getId() for y in movement_list]
delete_id_list = [x.getId() for x in delivery.contentValues() \
if x.getId() not in keep_id_list]
delivery.deleteContent(delete_id_list)
return delivery | [
"def",
"_createDelivery",
"(",
"self",
",",
"delivery_module",
",",
"movement_list",
",",
"activate_kw",
")",
":",
"# pylint: disable=super-on-old-class",
"try",
":",
"old_delivery",
"=",
"self",
".",
"_searchUpByPortalType",
"(",
"movement_list",
"[",
"0",
"]",
".",
"getDeliveryValue",
"(",
")",
",",
"self",
".",
"getDeliveryPortalType",
"(",
")",
")",
"except",
"AttributeError",
":",
"old_delivery",
"=",
"None",
"if",
"old_delivery",
"is",
"None",
":",
"# from scratch",
"delivery",
"=",
"super",
"(",
"SimulatedDeliveryBuilder",
",",
"self",
")",
".",
"_createDelivery",
"(",
"delivery_module",
",",
"movement_list",
",",
"activate_kw",
")",
"# Interactions will usually trigger reindexing of related SM when",
"# simulation state changes. Disable them for this transaction",
"# because we already do this in _setDeliveryMovementProperties",
"delivery",
".",
"updateSimulation",
"(",
"index_related",
"=",
"0",
")",
"else",
":",
"# from duplicated original delivery",
"cp",
"=",
"tryMethodCallWithTemporaryPermission",
"(",
"delivery_module",
",",
"'Copy or Move'",
",",
"lambda",
"parent",
",",
"*",
"ids",
":",
"parent",
".",
"_duplicate",
"(",
"parent",
".",
"manage_copyObjects",
"(",
"ids",
"=",
"ids",
")",
")",
"[",
"0",
"]",
",",
"(",
"delivery_module",
",",
"old_delivery",
".",
"getId",
"(",
")",
")",
",",
"{",
"}",
",",
"CopyError",
")",
"delivery",
"=",
"delivery_module",
"[",
"cp",
"[",
"'new_id'",
"]",
"]",
"# delete non-split movements",
"keep_id_list",
"=",
"[",
"y",
".",
"getDeliveryValue",
"(",
")",
".",
"getId",
"(",
")",
"for",
"y",
"in",
"movement_list",
"]",
"delete_id_list",
"=",
"[",
"x",
".",
"getId",
"(",
")",
"for",
"x",
"in",
"delivery",
".",
"contentValues",
"(",
")",
"if",
"x",
".",
"getId",
"(",
")",
"not",
"in",
"keep_id_list",
"]",
"delivery",
".",
"deleteContent",
"(",
"delete_id_list",
")",
"return",
"delivery"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/bt5/erp5_base/DocumentTemplateItem/portal_components/document.erp5.SimulatedDeliveryBuilder.py#L343-L377 |
|
wotermelon/toJump | 3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f | lib/mac/systrace/catapult/systrace/systrace/tracing_agents/atrace_agent.py | python | AtraceAgent.GetResults | (self, timeout=None) | return trace_result.TraceResult('systemTraceEvents', self._trace_data) | Waits for collection thread to finish and returns trace results. | Waits for collection thread to finish and returns trace results. | [
"Waits",
"for",
"collection",
"thread",
"to",
"finish",
"and",
"returns",
"trace",
"results",
"."
] | def GetResults(self, timeout=None):
"""Waits for collection thread to finish and returns trace results."""
self._collection_thread.join()
self._collection_thread = None
return trace_result.TraceResult('systemTraceEvents', self._trace_data) | [
"def",
"GetResults",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"self",
".",
"_collection_thread",
".",
"join",
"(",
")",
"self",
".",
"_collection_thread",
"=",
"None",
"return",
"trace_result",
".",
"TraceResult",
"(",
"'systemTraceEvents'",
",",
"self",
".",
"_trace_data",
")"
] | https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/mac/systrace/catapult/systrace/systrace/tracing_agents/atrace_agent.py#L209-L213 |
|
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | deps/v8/third_party/jinja2/meta.py | python | find_referenced_templates | (ast) | Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
yielded.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
>>> list(meta.find_referenced_templates(ast))
['layout.html', None]
This function is useful for dependency tracking. For example if you want
to rebuild parts of the website after a layout template has changed. | Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
yielded. | [
"Finds",
"all",
"the",
"referenced",
"templates",
"from",
"the",
"AST",
".",
"This",
"will",
"return",
"an",
"iterator",
"over",
"all",
"the",
"hardcoded",
"template",
"extensions",
"inclusions",
"and",
"imports",
".",
"If",
"dynamic",
"inheritance",
"or",
"inclusion",
"is",
"used",
"None",
"will",
"be",
"yielded",
"."
] | def find_referenced_templates(ast):
"""Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
yielded.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
>>> list(meta.find_referenced_templates(ast))
['layout.html', None]
This function is useful for dependency tracking. For example if you want
to rebuild parts of the website after a layout template has changed.
"""
for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
nodes.Include)):
if not isinstance(node.template, nodes.Const):
# a tuple with some non consts in there
if isinstance(node.template, (nodes.Tuple, nodes.List)):
for template_name in node.template.items:
# something const, only yield the strings and ignore
# non-string consts that really just make no sense
if isinstance(template_name, nodes.Const):
if isinstance(template_name.value, string_types):
yield template_name.value
# something dynamic in there
else:
yield None
# something dynamic we don't know about here
else:
yield None
continue
# constant is a basestring, direct template name
if isinstance(node.template.value, string_types):
yield node.template.value
# a tuple or list (latter *should* not happen) made of consts,
# yield the consts that are strings. We could warn here for
# non string values
elif isinstance(node, nodes.Include) and \
isinstance(node.template.value, (tuple, list)):
for template_name in node.template.value:
if isinstance(template_name, string_types):
yield template_name
# something else we don't care about, we could warn here
else:
yield None | [
"def",
"find_referenced_templates",
"(",
"ast",
")",
":",
"for",
"node",
"in",
"ast",
".",
"find_all",
"(",
"(",
"nodes",
".",
"Extends",
",",
"nodes",
".",
"FromImport",
",",
"nodes",
".",
"Import",
",",
"nodes",
".",
"Include",
")",
")",
":",
"if",
"not",
"isinstance",
"(",
"node",
".",
"template",
",",
"nodes",
".",
"Const",
")",
":",
"# a tuple with some non consts in there",
"if",
"isinstance",
"(",
"node",
".",
"template",
",",
"(",
"nodes",
".",
"Tuple",
",",
"nodes",
".",
"List",
")",
")",
":",
"for",
"template_name",
"in",
"node",
".",
"template",
".",
"items",
":",
"# something const, only yield the strings and ignore",
"# non-string consts that really just make no sense",
"if",
"isinstance",
"(",
"template_name",
",",
"nodes",
".",
"Const",
")",
":",
"if",
"isinstance",
"(",
"template_name",
".",
"value",
",",
"string_types",
")",
":",
"yield",
"template_name",
".",
"value",
"# something dynamic in there",
"else",
":",
"yield",
"None",
"# something dynamic we don't know about here",
"else",
":",
"yield",
"None",
"continue",
"# constant is a basestring, direct template name",
"if",
"isinstance",
"(",
"node",
".",
"template",
".",
"value",
",",
"string_types",
")",
":",
"yield",
"node",
".",
"template",
".",
"value",
"# a tuple or list (latter *should* not happen) made of consts,",
"# yield the consts that are strings. We could warn here for",
"# non string values",
"elif",
"isinstance",
"(",
"node",
",",
"nodes",
".",
"Include",
")",
"and",
"isinstance",
"(",
"node",
".",
"template",
".",
"value",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"for",
"template_name",
"in",
"node",
".",
"template",
".",
"value",
":",
"if",
"isinstance",
"(",
"template_name",
",",
"string_types",
")",
":",
"yield",
"template_name",
"# something else we don't care about, we could warn here",
"else",
":",
"yield",
"None"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/v8/third_party/jinja2/meta.py#L57-L103 |
||
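The doctest embedded in the record's docstring runs as-is with Jinja2 installed:

```python
from jinja2 import Environment, meta

env = Environment()
ast = env.parse('{% extends "layout.html" %}{% include helper %}')
# Hardcoded template names come back as strings; dynamic ones as None:
print(list(meta.find_referenced_templates(ast)))  # ['layout.html', None]
```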
ayojs/ayo | 45a1c8cf6384f5bcc81d834343c3ed9d78b97df3 | deps/v8/gypfiles/landmine_utils.py | python | gyp_defines | () | return dict(arg.split('=', 1)
for arg in shlex.split(os.environ.get('GYP_DEFINES', ''))) | Parses and returns GYP_DEFINES env var as a dictionary. | Parses and returns GYP_DEFINES env var as a dictionary. | [
"Parses",
"and",
"returns",
"GYP_DEFINES",
"env",
"var",
"as",
"a",
"dictionary",
"."
] | def gyp_defines():
"""Parses and returns GYP_DEFINES env var as a dictionary."""
return dict(arg.split('=', 1)
for arg in shlex.split(os.environ.get('GYP_DEFINES', ''))) | [
"def",
"gyp_defines",
"(",
")",
":",
"return",
"dict",
"(",
"arg",
".",
"split",
"(",
"'='",
",",
"1",
")",
"for",
"arg",
"in",
"shlex",
".",
"split",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'GYP_DEFINES'",
",",
"''",
")",
")",
")"
] | https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/deps/v8/gypfiles/landmine_utils.py#L45-L48 |
|
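A quick check of the parsing behaviour, reusing the `gyp_defines` helper from the record above (the variable values are made up):

```python
import os

os.environ['GYP_DEFINES'] = 'OS=linux target_arch="x64" use_goma=1'
print(gyp_defines())
# {'OS': 'linux', 'target_arch': 'x64', 'use_goma': '1'}
```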
ElasticHQ/elasticsearch-HQ | 8197e21d09b1312492dcb6998a2349d73b06efc6 | elastichq/vendor/elasticsearch_dsl/v5/elasticsearch_dsl/index.py | python | Index.put_mapping | (self, **kwargs) | return self.connection.indices.put_mapping(index=self._name, **kwargs) | Register specific mapping definition for a specific type.
Any additional keyword arguments will be passed to
``Elasticsearch.indices.put_mapping`` unchanged. | Register specific mapping definition for a specific type. | [
"Register",
"specific",
"mapping",
"definition",
"for",
"a",
"specific",
"type",
"."
] | def put_mapping(self, **kwargs):
"""
Register specific mapping definition for a specific type.
Any additional keyword arguments will be passed to
``Elasticsearch.indices.put_mapping`` unchanged.
"""
return self.connection.indices.put_mapping(index=self._name, **kwargs) | [
"def",
"put_mapping",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"connection",
".",
"indices",
".",
"put_mapping",
"(",
"index",
"=",
"self",
".",
"_name",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/ElasticHQ/elasticsearch-HQ/blob/8197e21d09b1312492dcb6998a2349d73b06efc6/elastichq/vendor/elasticsearch_dsl/v5/elasticsearch_dsl/index.py#L334-L341 |
|
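A hedged usage sketch for the elasticsearch-dsl v5 API documented above; it assumes a reachable Elasticsearch 5.x cluster and a registered default connection.

```python
from elasticsearch_dsl import Index
from elasticsearch_dsl.connections import connections

connections.create_connection(hosts=['localhost'])  # assumed local cluster
blog = Index('blog')
blog.put_mapping(doc_type='post',
                 body={'properties': {'title': {'type': 'text'}}})
```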
TeamvisionCorp/TeamVision | aa2a57469e430ff50cce21174d8f280efa0a83a7 | distribute/0.0.4/build_shell/teamvision/teamvision/ci/viewmodels/vm_ci_service_replace_file.py | python | VM_CIServiceRPFile.__init__ | (self,service_file,selected_files) | Constructor | Constructor | [
"Constructor"
] | def __init__(self,service_file,selected_files):
'''
Constructor
'''
self.service_file=service_file
self.selected_files=selected_files | [
"def",
"__init__",
"(",
"self",
",",
"service_file",
",",
"selected_files",
")",
":",
"self",
".",
"service_file",
"=",
"service_file",
"self",
".",
"selected_files",
"=",
"selected_files"
] | https://github.com/TeamvisionCorp/TeamVision/blob/aa2a57469e430ff50cce21174d8f280efa0a83a7/distribute/0.0.4/build_shell/teamvision/teamvision/ci/viewmodels/vm_ci_service_replace_file.py#L18-L23 |
||
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py | python | CalculateVariables | (default_variables, params) | Generated variables that require params to be known. | Generated variables that require params to be known. | [
"Generated",
"variables",
"that",
"require",
"params",
"to",
"be",
"known",
"."
] | def CalculateVariables(default_variables, params):
"""Generated variables that require params to be known."""
generator_flags = params.get('generator_flags', {})
# Select project file format version (if unset, default to auto detecting).
msvs_version = MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'))
# Stash msvs_version for later (so we don't have to probe the system twice).
params['msvs_version'] = msvs_version
# Set a variable so conditions can be based on msvs_version.
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCITEW6432 (which
# contains the actual word size of the system when running thru WOW64).
if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
if gyp.common.GetFlavor(params) == 'ninja':
default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen' | [
"def",
"CalculateVariables",
"(",
"default_variables",
",",
"params",
")",
":",
"generator_flags",
"=",
"params",
".",
"get",
"(",
"'generator_flags'",
",",
"{",
"}",
")",
"# Select project file format version (if unset, default to auto detecting).",
"msvs_version",
"=",
"MSVSVersion",
".",
"SelectVisualStudioVersion",
"(",
"generator_flags",
".",
"get",
"(",
"'msvs_version'",
",",
"'auto'",
")",
")",
"# Stash msvs_version for later (so we don't have to probe the system twice).",
"params",
"[",
"'msvs_version'",
"]",
"=",
"msvs_version",
"# Set a variable so conditions can be based on msvs_version.",
"default_variables",
"[",
"'MSVS_VERSION'",
"]",
"=",
"msvs_version",
".",
"ShortName",
"(",
")",
"# To determine processor word size on Windows, in addition to checking",
"# PROCESSOR_ARCHITECTURE (which reflects the word size of the current",
"# process), it is also necessary to check PROCESSOR_ARCITEW6432 (which",
"# contains the actual word size of the system when running thru WOW64).",
"if",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'PROCESSOR_ARCHITECTURE'",
",",
"''",
")",
".",
"find",
"(",
"'64'",
")",
">=",
"0",
"or",
"os",
".",
"environ",
".",
"get",
"(",
"'PROCESSOR_ARCHITEW6432'",
",",
"''",
")",
".",
"find",
"(",
"'64'",
")",
">=",
"0",
")",
":",
"default_variables",
"[",
"'MSVS_OS_BITS'",
"]",
"=",
"64",
"else",
":",
"default_variables",
"[",
"'MSVS_OS_BITS'",
"]",
"=",
"32",
"if",
"gyp",
".",
"common",
".",
"GetFlavor",
"(",
"params",
")",
"==",
"'ninja'",
":",
"default_variables",
"[",
"'SHARED_INTERMEDIATE_DIR'",
"]",
"=",
"'$(OutDir)gen'"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py#L1909-L1934 |
||
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5/Tool/IdTool.py | python | IdTool.dumpDictLengthIdsItems | (self) | Store persistently data from SQL table portal_ids. | Store persistently data from SQL table portal_ids. | [
"Store",
"persistently",
"data",
"from",
"SQL",
"table",
"portal_ids",
"."
] | def dumpDictLengthIdsItems(self):
"""
Store persistently data from SQL table portal_ids.
"""
portal_catalog = getattr(self, 'portal_catalog').getSQLCatalog()
query = getattr(portal_catalog, 'z_portal_ids_dump')
dict_length_ids = getattr(aq_base(self), 'dict_length_ids', None)
if dict_length_ids is None:
dict_length_ids = self.dict_length_ids = PersistentMapping()
for line in query().dictionaries():
id_group = line['id_group']
last_id = line['last_id']
stored_last_id = self.dict_length_ids.get(id_group)
if stored_last_id is None:
self.dict_length_ids[id_group] = Length(last_id)
else:
stored_last_id_value = stored_last_id()
if stored_last_id_value < last_id:
stored_last_id.set(last_id)
else:
if stored_last_id_value > last_id:
LOG('IdTool', WARNING, 'ZODB value (%r) for group %r is higher ' \
'than SQL value (%r). Keeping ZODB value untouched.' % \
(stored_last_id, id_group, last_id)) | [
"def",
"dumpDictLengthIdsItems",
"(",
"self",
")",
":",
"portal_catalog",
"=",
"getattr",
"(",
"self",
",",
"'portal_catalog'",
")",
".",
"getSQLCatalog",
"(",
")",
"query",
"=",
"getattr",
"(",
"portal_catalog",
",",
"'z_portal_ids_dump'",
")",
"dict_length_ids",
"=",
"getattr",
"(",
"aq_base",
"(",
"self",
")",
",",
"'dict_length_ids'",
",",
"None",
")",
"if",
"dict_length_ids",
"is",
"None",
":",
"dict_length_ids",
"=",
"self",
".",
"dict_length_ids",
"=",
"PersistentMapping",
"(",
")",
"for",
"line",
"in",
"query",
"(",
")",
".",
"dictionaries",
"(",
")",
":",
"id_group",
"=",
"line",
"[",
"'id_group'",
"]",
"last_id",
"=",
"line",
"[",
"'last_id'",
"]",
"stored_last_id",
"=",
"self",
".",
"dict_length_ids",
".",
"get",
"(",
"id_group",
")",
"if",
"stored_last_id",
"is",
"None",
":",
"self",
".",
"dict_length_ids",
"[",
"id_group",
"]",
"=",
"Length",
"(",
"last_id",
")",
"else",
":",
"stored_last_id_value",
"=",
"stored_last_id",
"(",
")",
"if",
"stored_last_id_value",
"<",
"last_id",
":",
"stored_last_id",
".",
"set",
"(",
"last_id",
")",
"else",
":",
"if",
"stored_last_id_value",
">",
"last_id",
":",
"LOG",
"(",
"'IdTool'",
",",
"WARNING",
",",
"'ZODB value (%r) for group %r is higher '",
"'than SQL value (%r). Keeping ZODB value untouched.'",
"%",
"(",
"stored_last_id",
",",
"id_group",
",",
"last_id",
")",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/Tool/IdTool.py#L368-L391 |
||
Sefaria/Sefaria-Project | 506752f49394fadebae283d525af8276eb2e241e | sefaria/helper/category.py | python | create_category | (path, en=None, he=None, searchRoot=None) | return c | Will create a new category at the location in the TOC indicated by `path`.
If there is a term for `path[-1]`, then that term will be used for this category.
Otherwise, a new Term will be created with titles `en` and `he`.
:param path: (List) the full path of the category to create
:param en: (String, optional)
:param he: (String, optional)
:param searchRoot: (String, optional) If this is present, then in the context of search filters, this category will appear under `searchRoot`.
:return: (model.Category) the new category object | Will create a new category at the location in the TOC indicated by `path`.
If there is a term for `path[-1]`, then that term will be used for this category.
Otherwise, a new Term will be created with titles `en` and `he`. | [
"Will",
"create",
"a",
"new",
"category",
"at",
"the",
"location",
"in",
"the",
"TOC",
"indicated",
"by",
"path",
".",
"If",
"there",
"is",
"a",
"term",
"for",
"path",
"[",
"-",
"1",
"]",
"then",
"that",
"term",
"will",
"be",
"used",
"for",
"this",
"category",
".",
"Otherwise",
"a",
"new",
"Term",
"will",
"be",
"created",
"with",
"titles",
"en",
"and",
"he",
"."
] | def create_category(path, en=None, he=None, searchRoot=None):
"""
Will create a new category at the location in the TOC indicated by `path`.
If there is a term for `path[-1]`, then that term will be used for this category.
Otherwise, a new Term will be created with titles `en` and `he`.
:param path: (List) the full path of the category to create
:param en: (String, optional)
:param he: (String, optional)
:param searchRoot: (String, optional) If this is present, then in the context of search filters, this category will appear under `searchRoot`.
:return: (model.Category) the new category object
"""
c = Category()
if not Term().load({"name": path[-1]}):
if en is None or he is None:
raise Exception("Need term names for {}".format(path[-1]))
print("adding term for " + en)
term = Term()
term.name = en
term.add_primary_titles(en, he)
term.scheme = "toc_categories"
term.save()
c.add_shared_term(path[-1])
c.path = path
c.lastPath = path[-1]
if searchRoot is not None:
c.searchRoot = searchRoot
print("Creating - {}".format(" / ".join(c.path)))
c.save(override_dependencies=True)
return c | [
"def",
"create_category",
"(",
"path",
",",
"en",
"=",
"None",
",",
"he",
"=",
"None",
",",
"searchRoot",
"=",
"None",
")",
":",
"c",
"=",
"Category",
"(",
")",
"if",
"not",
"Term",
"(",
")",
".",
"load",
"(",
"{",
"\"name\"",
":",
"path",
"[",
"-",
"1",
"]",
"}",
")",
":",
"if",
"en",
"is",
"None",
"or",
"he",
"is",
"None",
":",
"raise",
"Exception",
"(",
"\"Need term names for {}\"",
".",
"format",
"(",
"path",
"[",
"-",
"1",
"]",
")",
")",
"print",
"(",
"\"adding term for \"",
"+",
"en",
")",
"term",
"=",
"Term",
"(",
")",
"term",
".",
"name",
"=",
"en",
"term",
".",
"add_primary_titles",
"(",
"en",
",",
"he",
")",
"term",
".",
"scheme",
"=",
"\"toc_categories\"",
"term",
".",
"save",
"(",
")",
"c",
".",
"add_shared_term",
"(",
"path",
"[",
"-",
"1",
"]",
")",
"c",
".",
"path",
"=",
"path",
"c",
".",
"lastPath",
"=",
"path",
"[",
"-",
"1",
"]",
"if",
"searchRoot",
"is",
"not",
"None",
":",
"c",
".",
"searchRoot",
"=",
"searchRoot",
"print",
"(",
"\"Creating - {}\"",
".",
"format",
"(",
"\" / \"",
".",
"join",
"(",
"c",
".",
"path",
")",
")",
")",
"c",
".",
"save",
"(",
"override_dependencies",
"=",
"True",
")",
"return",
"c"
] | https://github.com/Sefaria/Sefaria-Project/blob/506752f49394fadebae283d525af8276eb2e241e/sefaria/helper/category.py#L125-L154 |
|
catmaid/CATMAID | 9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf | django/applications/catmaid/control/project.py | python | validate_project_setup | (project_id, user_id, fix=False,
class_model=None, rel_model=None, datastore_model=None) | return missing_classes, missing_relations, missing_datastores | Will check if needed classes and relations exist for every project. If
<fix> is truthy, missing objects will be added. | Will check if needed classes and relations exist for every project. If
<fix> is truthy, missing objects will be added. | [
"Will",
"check",
"if",
"needed",
"classes",
"and",
"relations",
"exist",
"for",
"every",
"project",
".",
"If",
"<fix",
">",
"is",
"truthy",
"missing",
"objects",
"will",
"be",
"added",
"."
] | def validate_project_setup(project_id, user_id, fix=False,
class_model=None, rel_model=None, datastore_model=None) -> Tuple[List, List, List]:
"""Will check if needed classes and relations exist for every project. If
<fix> is truthy, missing objects will be added.
"""
missing_classes = []
missing_relations = []
missing_datastores = []
class_model = class_model or Class
rel_model = rel_model or Relation
datastore_model = datastore_model or ClientDatastore
for nc, desc in needed_classes.items():
try:
class_model.objects.get(project_id=project_id, class_name=nc)
except class_model.DoesNotExist:
missing_classes.append(nc)
if fix:
class_model.objects.create(project_id=project_id,
class_name=nc, user_id=user_id)
for nr, desc in needed_relations.items():
try:
rel_model.objects.get(project_id=project_id, relation_name=nr)
except rel_model.DoesNotExist:
missing_relations.append(nr)
if fix:
rel_model.objects.get_or_create(project_id=project_id,
relation_name=nr, defaults={'user_id': user_id, 'description': desc})
for nd, desc in needed_datastores.items():
exists = datastore_model.objects.filter(name=nd).exists()
if not exists:
missing_datastores.append(nd)
if fix:
datastore_model.objects.get_or_create(name=nd)
return missing_classes, missing_relations, missing_datastores | [
"def",
"validate_project_setup",
"(",
"project_id",
",",
"user_id",
",",
"fix",
"=",
"False",
",",
"class_model",
"=",
"None",
",",
"rel_model",
"=",
"None",
",",
"datastore_model",
"=",
"None",
")",
"->",
"Tuple",
"[",
"List",
",",
"List",
",",
"List",
"]",
":",
"missing_classes",
"=",
"[",
"]",
"missing_relations",
"=",
"[",
"]",
"missing_datastores",
"=",
"[",
"]",
"class_model",
"=",
"class_model",
"or",
"Class",
"rel_model",
"=",
"rel_model",
"or",
"Relation",
"datastore_model",
"=",
"datastore_model",
"or",
"ClientDatastore",
"for",
"nc",
",",
"desc",
"in",
"needed_classes",
".",
"items",
"(",
")",
":",
"try",
":",
"class_model",
".",
"objects",
".",
"get",
"(",
"project_id",
"=",
"project_id",
",",
"class_name",
"=",
"nc",
")",
"except",
"class_model",
".",
"DoesNotExist",
":",
"missing_classes",
".",
"append",
"(",
"nc",
")",
"if",
"fix",
":",
"class_model",
".",
"objects",
".",
"create",
"(",
"project_id",
"=",
"project_id",
",",
"class_name",
"=",
"nc",
",",
"user_id",
"=",
"user_id",
")",
"for",
"nr",
",",
"desc",
"in",
"needed_relations",
".",
"items",
"(",
")",
":",
"try",
":",
"rel_model",
".",
"objects",
".",
"get",
"(",
"project_id",
"=",
"project_id",
",",
"relation_name",
"=",
"nr",
")",
"except",
"rel_model",
".",
"DoesNotExist",
":",
"missing_relations",
".",
"append",
"(",
"nr",
")",
"if",
"fix",
":",
"rel_model",
".",
"objects",
".",
"get_or_create",
"(",
"project_id",
"=",
"project_id",
",",
"relation_name",
"=",
"nr",
",",
"defaults",
"=",
"{",
"'user_id'",
":",
"user_id",
",",
"'description'",
":",
"desc",
"}",
")",
"for",
"nd",
",",
"desc",
"in",
"needed_datastores",
".",
"items",
"(",
")",
":",
"exists",
"=",
"datastore_model",
".",
"objects",
".",
"filter",
"(",
"name",
"=",
"nd",
")",
".",
"exists",
"(",
")",
"if",
"not",
"exists",
":",
"missing_datastores",
".",
"append",
"(",
"nd",
")",
"if",
"fix",
":",
"datastore_model",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"nd",
")",
"return",
"missing_classes",
",",
"missing_relations",
",",
"missing_datastores"
] | https://github.com/catmaid/CATMAID/blob/9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf/django/applications/catmaid/control/project.py#L55-L93 |
|
qwebirc/qwebirc | c3d5467287d131d55c6112c7c5b8731dd80675fb | esimplejson/decoder.py | python | JSONDecoder.decode | (self, s, _w=WHITESPACE.match) | return obj | Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document) | Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document) | [
"Return",
"the",
"Python",
"representation",
"of",
"s",
"(",
"a",
"str",
"or",
"unicode",
"instance",
"containing",
"a",
"JSON",
"document",
")"
] | def decode(self, s, _w=WHITESPACE.match):
"""
Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
end = _w(s, end).end()
if end != len(s):
raise ValueError(errmsg("Extra data", s, end, len(s)))
return obj | [
"def",
"decode",
"(",
"self",
",",
"s",
",",
"_w",
"=",
"WHITESPACE",
".",
"match",
")",
":",
"obj",
",",
"end",
"=",
"self",
".",
"raw_decode",
"(",
"s",
",",
"idx",
"=",
"_w",
"(",
"s",
",",
"0",
")",
".",
"end",
"(",
")",
")",
"end",
"=",
"_w",
"(",
"s",
",",
"end",
")",
".",
"end",
"(",
")",
"if",
"end",
"!=",
"len",
"(",
"s",
")",
":",
"raise",
"ValueError",
"(",
"errmsg",
"(",
"\"Extra data\"",
",",
"s",
",",
"end",
",",
"len",
"(",
"s",
")",
")",
")",
"return",
"obj"
] | https://github.com/qwebirc/qwebirc/blob/c3d5467287d131d55c6112c7c5b8731dd80675fb/esimplejson/decoder.py#L316-L325 |
|
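The stdlib `json` decoder keeps the same `decode` contract as this vendored `esimplejson` copy, so the behaviour can be checked without the vendored module:

```python
import json

dec = json.JSONDecoder()
print(dec.decode('{"a": 1}'))       # {'a': 1}
try:
    dec.decode('{"a": 1} trailing')
except ValueError as exc:           # raised as "Extra data: ..."
    print(exc)
```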
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | bt5/erp5_calendar/DocumentTemplateItem/portal_components/document.erp5.PresencePeriod.py | python | PresencePeriod.isAccountable | (self) | return 1 | For now, consider that it's always accountable | For now, consider that it's always accountable | [
"For",
"now",
"consider",
"that",
"it",
"s",
"always",
"accountable"
] | def isAccountable(self):
"""
For now, consider that it's always accountable
"""
return 1 | [
"def",
"isAccountable",
"(",
"self",
")",
":",
"return",
"1"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/bt5/erp5_calendar/DocumentTemplateItem/portal_components/document.erp5.PresencePeriod.py#L70-L74 |
|
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Type/Cache.py | python | CachingMethod.__init__ | (self, callable_object, id, cache_duration=180,
cache_factory=DEFAULT_CACHE_FACTORY,
cache_id_generator=_default_cache_id_generator) | Wrap a callable object in a caching method.
callable_object must be callable.
id is used to identify what call should be treated as the same call.
cache_duration is an old argument kept for backwards compatibility.
cache_duration is specified per cache factory.
cache_factory is the id of the cache_factory to use. | Wrap a callable object in a caching method. | [
"Wrap",
"a",
"callable",
"object",
"in",
"a",
"caching",
"method",
"."
] | def __init__(self, callable_object, id, cache_duration=180,
cache_factory=DEFAULT_CACHE_FACTORY,
cache_id_generator=_default_cache_id_generator):
"""Wrap a callable object in a caching method.
callable_object must be callable.
id is used to identify what call should be treated as the same call.
cache_duration is an old argument kept for backwards compatibility.
cache_duration is specified per cache factory.
cache_factory is the id of the cache_factory to use.
"""
if not callable(callable_object):
raise CachedMethodError("callable_object %r is not callable"
% (callable_object,))
if not id:
raise CachedMethodError("id must be specified")
self.id = id
self.callable_object = callable_object
self.cache_duration = cache_duration
self.cache_factory = cache_factory
self.generateCacheId = cache_id_generator | [
"def",
"__init__",
"(",
"self",
",",
"callable_object",
",",
"id",
",",
"cache_duration",
"=",
"180",
",",
"cache_factory",
"=",
"DEFAULT_CACHE_FACTORY",
",",
"cache_id_generator",
"=",
"_default_cache_id_generator",
")",
":",
"if",
"not",
"callable",
"(",
"callable_object",
")",
":",
"raise",
"CachedMethodError",
"(",
"\"callable_object %r is not callable\"",
"%",
"(",
"callable_object",
",",
")",
")",
"if",
"not",
"id",
":",
"raise",
"CachedMethodError",
"(",
"\"id must be specified\"",
")",
"self",
".",
"id",
"=",
"id",
"self",
".",
"callable_object",
"=",
"callable_object",
"self",
".",
"cache_duration",
"=",
"cache_duration",
"self",
".",
"cache_factory",
"=",
"cache_factory",
"self",
".",
"generateCacheId",
"=",
"cache_id_generator"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Type/Cache.py#L234-L254 |
||
thisismedium/python-xmpp-server | 8e6e009eabfecf4bbe36bdf2960455f0481a2ec1 | xmpp/plugin.py | python | Plugin.__new__ | (cls, state, *args, **kwargs) | return self | Record a special state attribute that's used internally in
the Plugin base class. | Record a special state attribute that's used internally in
the Plugin base class. | [
"Record",
"a",
"special",
"state",
"attribute",
"that",
"s",
"used",
"internally",
"in",
"the",
"Plugin",
"base",
"class",
"."
] | def __new__(cls, state, *args, **kwargs):
"""Record a special state attribute that's used internally in
the Plugin base class."""
self = object.__new__(cls)
self.__state = state
self.__core = state.core
self.__plugins = state.plugins
return self | [
"def",
"__new__",
"(",
"cls",
",",
"state",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
"=",
"object",
".",
"__new__",
"(",
"cls",
")",
"self",
".",
"__state",
"=",
"state",
"self",
".",
"__core",
"=",
"state",
".",
"core",
"self",
".",
"__plugins",
"=",
"state",
".",
"plugins",
"return",
"self"
] | https://github.com/thisismedium/python-xmpp-server/blob/8e6e009eabfecf4bbe36bdf2960455f0481a2ec1/xmpp/plugin.py#L461-L470 |
|
zhao94254/fun | 491acf6a7d9594f91a8cd717a403d9e1e5d0f386 | network/case/stress.py | python | BaseQuery.check | (self) | return rate == 1, rate | Return whether all checks succeeded, and the success rate | Return whether all checks succeeded, and the success rate | [
"Return",
"whether",
"all",
"checks",
"succeeded",
",",
"and",
"the",
"success",
"rate"
] | def check(self):
""" Return whether all checks succeeded, and the success rate"""
methods = self.getmethods()
_check = [m for m in methods if m[0].startswith('check_')]
all_check = len(_check)
success_check = len([1 for m in _check if m[1]()])
rate = success_check / all_check
return rate == 1, rate | [
"def",
"check",
"(",
"self",
")",
":",
"methods",
"=",
"self",
".",
"getmethods",
"(",
")",
"_check",
"=",
"[",
"m",
"for",
"m",
"in",
"methods",
"if",
"m",
"[",
"0",
"]",
".",
"startswith",
"(",
"'check_'",
")",
"]",
"all_check",
"=",
"len",
"(",
"_check",
")",
"success_check",
"=",
"len",
"(",
"[",
"1",
"for",
"m",
"in",
"_check",
"if",
"m",
"[",
"1",
"]",
"(",
")",
"]",
")",
"rate",
"=",
"success_check",
"/",
"all_check",
"return",
"rate",
"==",
"1",
",",
"rate"
] | https://github.com/zhao94254/fun/blob/491acf6a7d9594f91a8cd717a403d9e1e5d0f386/network/case/stress.py#L46-L53 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/robotparser.py | python | RobotFileParser.modified | (self) | Sets the time the robots.txt file was last fetched to the
current time. | Sets the time the robots.txt file was last fetched to the
current time. | [
"Sets",
"the",
"time",
"the",
"robots",
".",
"txt",
"file",
"was",
"last",
"fetched",
"to",
"the",
"current",
"time",
"."
] | def modified(self):
"""Sets the time the robots.txt file was last fetched to the
current time.
"""
import time
self.last_checked = time.time() | [
"def",
"modified",
"(",
"self",
")",
":",
"import",
"time",
"self",
".",
"last_checked",
"=",
"time",
".",
"time",
"(",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/robotparser.py#L41-L47 |
||
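In Python 3 the same class lives in `urllib.robotparser`; the `modified`/`mtime` pair behaves like this:

```python
import time
import urllib.robotparser

rp = urllib.robotparser.RobotFileParser()
rp.modified()                     # stamp "now" as the last fetch time
print(rp.mtime() <= time.time())  # True
```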
ibuler/jumpserver | 0aa43c7cabc012cf02f39826fdce80f4b7b7654b | jperm/ansible_api.py | python | MyTask.push_multi_key | (self, **user_info) | push multi key
:param user_info:
:return: | push multi key
:param user_info:
:return: | [
"push",
"multi",
"key",
":",
"param",
"user_info",
":",
":",
"return",
":"
] | def push_multi_key(self, **user_info):
"""
push multi key
:param user_info:
:return:
"""
ret_failed = []
ret_success = []
for user, key_path in user_info.iteritems():
ret = self.push_key(user, key_path)
if ret.get("status") == "ok":
ret_success.append(ret)
if ret.get("status") == "failed":
ret_failed.append(ret)
if ret_failed:
return {"status": "failed", "msg": ret_failed}
else:
return {"status": "success", "msg": ret_success} | [
"def",
"push_multi_key",
"(",
"self",
",",
"*",
"*",
"user_info",
")",
":",
"ret_failed",
"=",
"[",
"]",
"ret_success",
"=",
"[",
"]",
"for",
"user",
",",
"key_path",
"in",
"user_info",
".",
"iteritems",
"(",
")",
":",
"ret",
"=",
"self",
".",
"push_key",
"(",
"user",
",",
"key_path",
")",
"if",
"ret",
".",
"get",
"(",
"\"status\"",
")",
"==",
"\"ok\"",
":",
"ret_success",
".",
"append",
"(",
"ret",
")",
"if",
"ret",
".",
"get",
"(",
"\"status\"",
")",
"==",
"\"failed\"",
":",
"ret_failed",
".",
"append",
"(",
"ret",
")",
"if",
"ret_failed",
":",
"return",
"{",
"\"status\"",
":",
"\"failed\"",
",",
"\"msg\"",
":",
"ret_failed",
"}",
"else",
":",
"return",
"{",
"\"status\"",
":",
"\"success\"",
",",
"\"msg\"",
":",
"ret_success",
"}"
] | https://github.com/ibuler/jumpserver/blob/0aa43c7cabc012cf02f39826fdce80f4b7b7654b/jperm/ansible_api.py#L294-L312 |
||
oscarmlage/django-cruds-adminlte | 9110c692d22a4c1c3f92840f7196b0b6e59006ba | cruds_adminlte/urls.py | python | crud_for_model | (model, urlprefix=None, namespace=None,
login_required=False, check_perms=False,
add_form=None,
update_form=None, views=None, cruds_url=None,
list_fields=None, related_fields=None,
mixin=None) | return nc.get_urls() | Returns list of ``url`` items to CRUD a model.
@param mixin=None -- mixin to be used as a base. | Returns list of ``url`` items to CRUD a model. | [
"Returns",
"list",
"of",
"url",
"items",
"to",
"CRUD",
"a",
"model",
"."
] | def crud_for_model(model, urlprefix=None, namespace=None,
login_required=False, check_perms=False,
add_form=None,
update_form=None, views=None, cruds_url=None,
list_fields=None, related_fields=None,
mixin=None):
"""
Returns list of ``url`` items to CRUD a model.
@param mixin=None -- mixin to be used as a base.
"""
if mixin and not issubclass(mixin, CRUDMixin):
raise ValueError(
'Mixin needs to be a subclass of <%s>', CRUDMixin.__name__
)
mymodel = model
myurlprefix = urlprefix
mynamespace = namespace
mycheck_perms = check_perms
myadd_form = add_form
myupdate_form = update_form
mycruds_url = cruds_url
mylist_fields = list_fields
myrelated_fields = related_fields
mymixin = mixin
class NOCLASS(CRUDView):
model = mymodel
urlprefix = myurlprefix
namespace = mynamespace
check_login = login_required
check_perms = mycheck_perms
update_form = myupdate_form
add_form = myadd_form
views_available = views
cruds_url = mycruds_url
list_fields = mylist_fields
related_fields = myrelated_fields
# mixin = mymixin # @FIXME TypeError: metaclass conflict: the metaclass
# of a derived class must be a (non-strict) subclass of the metaclasses
# of all its bases
nc = NOCLASS()
return nc.get_urls() | [
"def",
"crud_for_model",
"(",
"model",
",",
"urlprefix",
"=",
"None",
",",
"namespace",
"=",
"None",
",",
"login_required",
"=",
"False",
",",
"check_perms",
"=",
"False",
",",
"add_form",
"=",
"None",
",",
"update_form",
"=",
"None",
",",
"views",
"=",
"None",
",",
"cruds_url",
"=",
"None",
",",
"list_fields",
"=",
"None",
",",
"related_fields",
"=",
"None",
",",
"mixin",
"=",
"None",
")",
":",
"if",
"mixin",
"and",
"not",
"issubclass",
"(",
"mixin",
",",
"CRUDMixin",
")",
":",
"raise",
"ValueError",
"(",
"'Mixin needs to be a subclass of <%s>'",
",",
"CRUDMixin",
".",
"__name__",
")",
"mymodel",
"=",
"model",
"myurlprefix",
"=",
"urlprefix",
"mynamespace",
"=",
"namespace",
"mycheck_perms",
"=",
"check_perms",
"myadd_form",
"=",
"add_form",
"myupdate_form",
"=",
"update_form",
"mycruds_url",
"=",
"cruds_url",
"mylist_fields",
"=",
"list_fields",
"myrelated_fields",
"=",
"related_fields",
"mymixin",
"=",
"mixin",
"class",
"NOCLASS",
"(",
"CRUDView",
")",
":",
"model",
"=",
"mymodel",
"urlprefix",
"=",
"myurlprefix",
"namespace",
"=",
"mynamespace",
"check_login",
"=",
"login_required",
"check_perms",
"=",
"mycheck_perms",
"update_form",
"=",
"myupdate_form",
"add_form",
"=",
"myadd_form",
"views_available",
"=",
"views",
"cruds_url",
"=",
"mycruds_url",
"list_fields",
"=",
"mylist_fields",
"related_fields",
"=",
"myrelated_fields",
"# mixin = mymixin # @FIXME TypeError: metaclass conflict: the metaclass",
"# of a derived class must be a (non-strict) subclass of the metaclasses",
"# of all its bases",
"nc",
"=",
"NOCLASS",
"(",
")",
"return",
"nc",
".",
"get_urls",
"(",
")"
] | https://github.com/oscarmlage/django-cruds-adminlte/blob/9110c692d22a4c1c3f92840f7196b0b6e59006ba/cruds_adminlte/urls.py#L10-L53 |
|
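Usage sketch for the `crud_for_model` record above: the returned list of URL patterns plugs straight into a Django URLconf. `Author` and `myapp` are hypothetical names; only `crud_for_model` itself comes from the record.

```python
# urls.py -- a minimal sketch; Author/myapp are placeholder names.
from cruds_adminlte.urls import crud_for_model
from myapp.models import Author  # hypothetical model

urlpatterns = []
# Appends the generated list/create/update/delete patterns for Author,
# requiring login and namespacing the URL names under 'myadmin'.
urlpatterns += crud_for_model(Author, namespace='myadmin', login_required=True)
```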
Opentrons/opentrons | 466e0567065d8773a81c25cd1b5c7998e00adf2c | api/docs/v1/api_cache/pipette.py | python | Pipette.transfer | (self, volume, source, dest, **kwargs) | Transfer will move a volume of liquid from a source location(s)
to a dest location(s). It is a higher-level command, incorporating
other :any:`Pipette` commands, like :any:`aspirate` and
:any:`dispense`, designed to make protocol writing easier at the
cost of specificity.
Parameters
----------
volumes : number, list, or tuple
The amount of volume to remove from each `sources` :any:`Placeable`
and add to each `targets` :any:`Placeable`. If `volumes` is a list,
each volume will be used for the sources/targets at the
matching index. If `volumes` is a tuple with two elements,
like `(20, 100)`, then a list of volumes will be generated with
a linear gradient between the two volumes in the tuple.
source : Placeable or list
Single :any:`Placeable` or list of :any:`Placeable`\\ s, from where
liquid will be :any:`aspirate`\\ d from.
dest : Placeable or list
Single :any:`Placeable` or list of :any:`Placeable`\\ s, where
liquid will be :any:`dispense`\\ ed to.
new_tip : str
The number of clean tips this transfer command will use. If
'never', no tips will be picked up nor dropped. If 'once', a
single tip will be used for all cmds. If 'always', a new tip
will be used for each transfer. Default is 'once'.
trash : boolean
If `True` (default behavior) and trash container has been attached
to this `Pipette`, then the tip will be sent to the trash
container.
If `False`, then tips will be returned to their associated tiprack.
touch_tip : boolean
If `True`, a :any:`touch_tip` will occur following each
:any:`aspirate` and :any:`dispense`. If set to `False` (default),
no :any:`touch_tip` will occur.
blow_out : boolean
If `True`, a :any:`blow_out` will occur following each
:any:`dispense`, but only if the pipette has no liquid left in it.
If set to `False` (default), no :any:`blow_out` will occur.
mix_before : tuple
Specify the number of repetitions and the volume to mix, and a :any:`mix`
will precede each :any:`aspirate` during the transfer and dispense.
The tuple's values are interpreted as (repetitions, volume).
mix_after : tuple
Specify the number of repetitions and the volume to mix, and a :any:`mix`
will follow each :any:`dispense` during the transfer or
consolidate. The tuple's values are interpreted as
(repetitions, volume).
carryover : boolean
If `True` (default), any `volumes` that exceed the maximum volume
of this `Pipette` will be split into multiple smaller volumes.
repeat : boolean
(Only applicable to :any:`distribute` and :any:`consolidate`.) If
`True` (default), sequential :any:`aspirate` volumes will be
combined into one tip for the purpose of saving time. If `False`,
all volumes will be transferred separately.
gradient : lambda
Function for calculating the curve used for gradient volumes.
When `volumes` is a tuple of length 2, its values are used
to create a list of gradient volumes. The default curve for
this gradient is linear (lambda x: x), however a method can
be passed with the `gradient` keyword argument to create a
custom curve.
Returns
-------
This instance of :class:`Pipette`.
Examples
--------
>>> from opentrons import instruments, labware, robot # doctest: +SKIP
>>> robot.reset() # doctest: +SKIP
>>> plate = labware.load('96-flat', '5') # doctest: +SKIP
>>> p300 = instruments.P300_Single(mount='right') # doctest: +SKIP
>>> p300.transfer(50, plate[0], plate[1]) # doctest: +SKIP | Transfer will move a volume of liquid from a source location(s)
to a dest location(s). It is a higher-level command, incorporating
other :any:`Pipette` commands, like :any:`aspirate` and
:any:`dispense`, designed to make protocol writing easier at the
cost of specificity. | [
"Transfer",
"will",
"move",
"a",
"volume",
"of",
"liquid",
"from",
"a",
"source",
"location",
"(",
"s",
")",
"to",
"a",
"dest",
"location",
"(",
"s",
")",
".",
"It",
"is",
"a",
"higher",
"-",
"level",
"command",
"incorporating",
"other",
":",
"any",
":",
"Pipette",
"commands",
"like",
":",
"any",
":",
"aspirate",
"and",
":",
"any",
":",
"dispense",
"designed",
"to",
"make",
"protocol",
"writing",
"easier",
"at",
"the",
"cost",
"of",
"specificity",
"."
] | def transfer(self, volume, source, dest, **kwargs):
"""
Transfer will move a volume of liquid from a source location(s)
to a dest location(s). It is a higher-level command, incorporating
other :any:`Pipette` commands, like :any:`aspirate` and
:any:`dispense`, designed to make protocol writing easier at the
cost of specificity.
Parameters
----------
volumes : number, list, or tuple
The amount of volume to remove from each `sources` :any:`Placeable`
and add to each `targets` :any:`Placeable`. If `volumes` is a list,
each volume will be used for the sources/targets at the
matching index. If `volumes` is a tuple with two elements,
like `(20, 100)`, then a list of volumes will be generated with
a linear gradient between the two volumes in the tuple.
source : Placeable or list
Single :any:`Placeable` or list of :any:`Placeable`\\ s, from where
liquid will be :any:`aspirate`\\ d from.
dest : Placeable or list
Single :any:`Placeable` or list of :any:`Placeable`\\ s, where
liquid will be :any:`dispense`\\ ed to.
new_tip : str
The number of clean tips this transfer command will use. If
'never', no tips will be picked up nor dropped. If 'once', a
single tip will be used for all cmds. If 'always', a new tip
will be used for each transfer. Default is 'once'.
trash : boolean
If `True` (default behavior) and trash container has been attached
to this `Pipette`, then the tip will be sent to the trash
container.
If `False`, then tips will be returned to their associated tiprack.
touch_tip : boolean
If `True`, a :any:`touch_tip` will occur following each
:any:`aspirate` and :any:`dispense`. If set to `False` (default),
no :any:`touch_tip` will occur.
blow_out : boolean
If `True`, a :any:`blow_out` will occur following each
:any:`dispense`, but only if the pipette has no liquid left in it.
If set to `False` (default), no :any:`blow_out` will occur.
mix_before : tuple
Specify the number of repetitions and the volume to mix, and a :any:`mix`
will precede each :any:`aspirate` during the transfer and dispense.
The tuple's values are interpreted as (repetitions, volume).
mix_after : tuple
Specify the number of repetitions and the volume to mix, and a :any:`mix`
will follow each :any:`dispense` during the transfer or
consolidate. The tuple's values are interpreted as
(repetitions, volume).
carryover : boolean
If `True` (default), any `volumes` that exceed the maximum volume
of this `Pipette` will be split into multiple smaller volumes.
repeat : boolean
(Only applicable to :any:`distribute` and :any:`consolidate`.) If
`True` (default), sequential :any:`aspirate` volumes will be
combined into one tip for the purpose of saving time. If `False`,
all volumes will be transferred separately.
gradient : lambda
Function for calculating the curve used for gradient volumes.
When `volumes` is a tuple of length 2, its values are used
to create a list of gradient volumes. The default curve for
this gradient is linear (lambda x: x), however a method can
be passed with the `gradient` keyword argument to create a
custom curve.
Returns
-------
This instance of :class:`Pipette`.
Examples
--------
>>> from opentrons import instruments, labware, robot # doctest: +SKIP
>>> robot.reset() # doctest: +SKIP
>>> plate = labware.load('96-flat', '5') # doctest: +SKIP
>>> p300 = instruments.P300_Single(mount='right') # doctest: +SKIP
>>> p300.transfer(50, plate[0], plate[1]) # doctest: +SKIP
"""
pass | [
"def",
"transfer",
"(",
"self",
",",
"volume",
",",
"source",
",",
"dest",
",",
"*",
"*",
"kwargs",
")",
":",
"pass"
] | https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/docs/v1/api_cache/pipette.py#L614-L705 |
||
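A hedged sketch of the `transfer` options documented above, using the docstring's own import style; the tip-rack labware name and deck slots are assumptions, not part of the record.

```python
from opentrons import instruments, labware, robot

robot.reset()
plate = labware.load('96-flat', '5')
tiprack = labware.load('opentrons-tiprack-300ul', '1')  # assumed labware name
p300 = instruments.P300_Single(mount='right', tip_racks=[tiprack])

# Gradient transfer: volumes ramp linearly from 20 to 100 uL across eight
# source/destination pairs, with a fresh tip per transfer and a 3 x 50 uL
# mix after each dispense.
p300.transfer((20, 100),
              plate.wells()[:8],
              plate.wells()[8:16],
              new_tip='always',
              mix_after=(3, 50))
```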
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/collections.py | python | OrderedDict.fromkeys | (cls, iterable, value=None) | return self | OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None. | OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None. | [
"OD",
".",
"fromkeys",
"(",
"S",
"[",
"v",
"]",
")",
"-",
">",
"New",
"ordered",
"dictionary",
"with",
"keys",
"from",
"S",
".",
"If",
"not",
"specified",
"the",
"value",
"defaults",
"to",
"None",
"."
] | def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self | [
"def",
"fromkeys",
"(",
"cls",
",",
"iterable",
",",
"value",
"=",
"None",
")",
":",
"self",
"=",
"cls",
"(",
")",
"for",
"key",
"in",
"iterable",
":",
"self",
"[",
"key",
"]",
"=",
"value",
"return",
"self"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/collections.py#L195-L203 |
|
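Two properties of `OrderedDict.fromkeys` worth seeing concretely: key order follows the iterable, and the single `value` object is shared by every key, which matters for mutable defaults.

```python
from collections import OrderedDict

od = OrderedDict.fromkeys('bac', 0)
print(od)           # OrderedDict([('b', 0), ('a', 0), ('c', 0)])

shared = OrderedDict.fromkeys('xy', [])
shared['x'].append(1)
print(shared['y'])  # [1] -- the same list instance backs both keys
```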
googleglass/mirror-quickstart-python | e34077bae91657170c305702471f5c249eb1b686 | lib/gflags_validators.py | python | DictionaryValidator.__init__ | (self, flag_names, checker, message) | Constructor.
Args:
flag_names: [string], containing names of the flags used by checker.
checker: function to verify the validator.
input - dictionary, with keys() being flag_names, and value for each
key being the value of the corresponding flag (string, boolean, etc).
output - Boolean. Must return True if validator constraint is satisfied.
If constraint is not satisfied, it should either return False or
raise Error.
message: string, error message to be shown to the user if validator's
condition is not satisfied | Constructor. | [
"Constructor",
"."
] | def __init__(self, flag_names, checker, message):
"""Constructor.
Args:
flag_names: [string], containing names of the flags used by checker.
checker: function to verify the validator.
input - dictionary, with keys() being flag_names, and value for each
key being the value of the corresponding flag (string, boolean, etc).
output - Boolean. Must return True if validator constraint is satisfied.
If constraint is not satisfied, it should either return False or
raise Error.
message: string, error message to be shown to the user if validator's
condition is not satisfied
"""
super(DictionaryValidator, self).__init__(checker, message)
self.flag_names = flag_names | [
"def",
"__init__",
"(",
"self",
",",
"flag_names",
",",
"checker",
",",
"message",
")",
":",
"super",
"(",
"DictionaryValidator",
",",
"self",
")",
".",
"__init__",
"(",
"checker",
",",
"message",
")",
"self",
".",
"flag_names",
"=",
"flag_names"
] | https://github.com/googleglass/mirror-quickstart-python/blob/e34077bae91657170c305702471f5c249eb1b686/lib/gflags_validators.py#L151-L166 |
||
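These validators are normally constructed for you by the flag-registration helpers; the sketch below only illustrates the checker contract the docstring spells out. The flag names are hypothetical.

```python
# The checker receives a dict keyed by flag name and returns True when the
# cross-flag constraint holds.
def at_most_one_output(flags_dict):
    return not (flags_dict['json_out'] and flags_dict['xml_out'])

validator = DictionaryValidator(
    ['json_out', 'xml_out'],   # flags the checker reads
    at_most_one_output,
    'Only one of --json_out/--xml_out may be set.')
```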
jxcore/jxcore | b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py | python | Writer.__init__ | (self, project_path, version, name, guid=None, platforms=None) | Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
name: Name of the project.
guid: GUID to use for project, if not None.
platforms: Array of string, the supported platforms. If null, ['Win32'] | Initializes the project. | [
"Initializes",
"the",
"project",
"."
] | def __init__(self, project_path, version, name, guid=None, platforms=None):
"""Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
name: Name of the project.
guid: GUID to use for project, if not None.
platforms: Array of string, the supported platforms. If null, ['Win32']
"""
self.project_path = project_path
self.version = version
self.name = name
self.guid = guid
# Default to Win32 for platforms.
if not platforms:
platforms = ['Win32']
# Initialize the specifications of the various sections.
self.platform_section = ['Platforms']
for platform in platforms:
self.platform_section.append(['Platform', {'Name': platform}])
self.tool_files_section = ['ToolFiles']
self.configurations_section = ['Configurations']
self.files_section = ['Files']
# Keep a dict keyed on filename to speed up access.
self.files_dict = dict() | [
"def",
"__init__",
"(",
"self",
",",
"project_path",
",",
"version",
",",
"name",
",",
"guid",
"=",
"None",
",",
"platforms",
"=",
"None",
")",
":",
"self",
".",
"project_path",
"=",
"project_path",
"self",
".",
"version",
"=",
"version",
"self",
".",
"name",
"=",
"name",
"self",
".",
"guid",
"=",
"guid",
"# Default to Win32 for platforms.",
"if",
"not",
"platforms",
":",
"platforms",
"=",
"[",
"'Win32'",
"]",
"# Initialize the specifications of the various sections.",
"self",
".",
"platform_section",
"=",
"[",
"'Platforms'",
"]",
"for",
"platform",
"in",
"platforms",
":",
"self",
".",
"platform_section",
".",
"append",
"(",
"[",
"'Platform'",
",",
"{",
"'Name'",
":",
"platform",
"}",
"]",
")",
"self",
".",
"tool_files_section",
"=",
"[",
"'ToolFiles'",
"]",
"self",
".",
"configurations_section",
"=",
"[",
"'Configurations'",
"]",
"self",
".",
"files_section",
"=",
"[",
"'Files'",
"]",
"# Keep a dict keyed on filename to speed up access.",
"self",
".",
"files_dict",
"=",
"dict",
"(",
")"
] | https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py#L54-L82 |
||
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | addons/hw_drivers/connection_manager.py | python | ConnectionManager._refresh_displays | (self) | Refresh all displays to hide the pairing code | Refresh all displays to hide the pairing code | [
"Refresh",
"all",
"displays",
"to",
"hide",
"the",
"pairing",
"code"
] | def _refresh_displays(self):
"""Refresh all displays to hide the pairing code"""
for d in iot_devices:
if iot_devices[d].device_type == 'display':
iot_devices[d].action({
'action': 'display_refresh'
}) | [
"def",
"_refresh_displays",
"(",
"self",
")",
":",
"for",
"d",
"in",
"iot_devices",
":",
"if",
"iot_devices",
"[",
"d",
"]",
".",
"device_type",
"==",
"'display'",
":",
"iot_devices",
"[",
"d",
"]",
".",
"action",
"(",
"{",
"'action'",
":",
"'display_refresh'",
"}",
")"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/hw_drivers/connection_manager.py#L67-L73 |
||
alaxli/ansible_ui | ea7a76e1de6d2aec3777c0182dd8cc3529c9ccd7 | desktop/apps/ansible/elfinder/connector.py | python | ElfinderConnector.execute | (self, cmd, **kwargs) | return result | Exec command and return result | Exec command and return result | [
"Exec",
"command",
"and",
"return",
"result"
] | def execute(self, cmd, **kwargs):
"""
Exec command and return result
"""
if not self._loaded:
return { 'error' : self.error(ElfinderErrorMessages.ERROR_CONF, ElfinderErrorMessages.ERROR_CONF_NO_VOL)}
if not self.commandExists(cmd):
return { 'error' : self.error(ElfinderErrorMessages.ERROR_UNKNOWN_CMD, cmd)}
#check all required arguments are provided
for arg, req in self.commandArgsList(cmd).items():
if req and (not arg in kwargs or not kwargs[arg]):
return {'error' : self.error(ElfinderErrorMessages.ERROR_INV_PARAMS, cmd)}
#set mimes filter and pop mimes from the arguments list
if 'mimes' in kwargs:
for id_ in self._volumes:
self._volumes[id_].set_mimes_filter(kwargs['mimes'])
kwargs.pop('mimes')
debug = self._debug or ('debug' in kwargs and int(kwargs['debug']))
#remove debug keyword argument
if 'debug' in kwargs:
kwargs.pop('debug')
result = getattr(self, '_%s' % cmd)(**kwargs)
#check for removed items as these are not directly returned
if 'removed' in result:
for id_ in self._volumes:
result['removed'] += self._volumes[id_].removed()
self._volumes[id_].reset_removed()
#replace removed files info with removed files hashes and filter out duplicates
result['removed'] = list(set([f['hash'] for f in result['removed']]))
#call handlers for this command
#TODO: a signal must be sent here
if debug:
result['debug'] = {
'connector' : 'yawd-elfinder',
'time' : time.time() - self._time,
'upload' : self._uploadDebug,
'volumes' : [v.debug() for v in self._volumes.values()],
'mountErrors' : self._mountErrors
}
return result | [
"def",
"execute",
"(",
"self",
",",
"cmd",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"_loaded",
":",
"return",
"{",
"'error'",
":",
"self",
".",
"error",
"(",
"ElfinderErrorMessages",
".",
"ERROR_CONF",
",",
"ElfinderErrorMessages",
".",
"ERROR_CONF_NO_VOL",
")",
"}",
"if",
"not",
"self",
".",
"commandExists",
"(",
"cmd",
")",
":",
"return",
"{",
"'error'",
":",
"self",
".",
"error",
"(",
"ElfinderErrorMessages",
".",
"ERROR_UNKNOWN_CMD",
",",
"cmd",
")",
"}",
"#check all required arguments are provided",
"for",
"arg",
",",
"req",
"in",
"self",
".",
"commandArgsList",
"(",
"cmd",
")",
".",
"items",
"(",
")",
":",
"if",
"req",
"and",
"(",
"not",
"arg",
"in",
"kwargs",
"or",
"not",
"kwargs",
"[",
"arg",
"]",
")",
":",
"return",
"{",
"'error'",
":",
"self",
".",
"error",
"(",
"ElfinderErrorMessages",
".",
"ERROR_INV_PARAMS",
",",
"cmd",
")",
"}",
"#set mimes filter and pop mimes from the arguments list",
"if",
"'mimes'",
"in",
"kwargs",
":",
"for",
"id_",
"in",
"self",
".",
"_volumes",
":",
"self",
".",
"_volumes",
"[",
"id_",
"]",
".",
"set_mimes_filter",
"(",
"kwargs",
"[",
"'mimes'",
"]",
")",
"kwargs",
".",
"pop",
"(",
"'mimes'",
")",
"debug",
"=",
"self",
".",
"_debug",
"or",
"(",
"'debug'",
"in",
"kwargs",
"and",
"int",
"(",
"kwargs",
"[",
"'debug'",
"]",
")",
")",
"#remove debug kewyord argument ",
"if",
"'debug'",
"in",
"kwargs",
":",
"kwargs",
".",
"pop",
"(",
"'debug'",
")",
"result",
"=",
"getattr",
"(",
"self",
",",
"'_%s'",
"%",
"cmd",
")",
"(",
"*",
"*",
"kwargs",
")",
"#checked for removed items as these are not directly returned",
"if",
"'removed'",
"in",
"result",
":",
"for",
"id_",
"in",
"self",
".",
"_volumes",
":",
"result",
"[",
"'removed'",
"]",
"+=",
"self",
".",
"_volumes",
"[",
"id_",
"]",
".",
"removed",
"(",
")",
"self",
".",
"_volumes",
"[",
"id_",
"]",
".",
"reset_removed",
"(",
")",
"#replace removed files info with removed files hashes and filter out duplicates",
"result",
"[",
"'removed'",
"]",
"=",
"list",
"(",
"set",
"(",
"[",
"f",
"[",
"'hash'",
"]",
"for",
"f",
"in",
"result",
"[",
"'removed'",
"]",
"]",
")",
")",
"#call handlers for this command",
"#TODO: a signal must be sent here",
"if",
"debug",
":",
"result",
"[",
"'debug'",
"]",
"=",
"{",
"'connector'",
":",
"'yawd-elfinder'",
",",
"'time'",
":",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"_time",
",",
"'upload'",
":",
"self",
".",
"_uploadDebug",
",",
"'volumes'",
":",
"[",
"v",
".",
"debug",
"(",
")",
"for",
"v",
"in",
"self",
".",
"_volumes",
".",
"values",
"(",
")",
"]",
",",
"'mountErrors'",
":",
"self",
".",
"_mountErrors",
"}",
"return",
"result"
] | https://github.com/alaxli/ansible_ui/blob/ea7a76e1de6d2aec3777c0182dd8cc3529c9ccd7/desktop/apps/ansible/elfinder/connector.py#L129-L177 |
|
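A hedged sketch of driving `execute` directly; the connector's volume configuration is elided, and the `open` arguments follow the elFinder client protocol rather than anything shown in the record.

```python
connector = ElfinderConnector(opts)  # opts: volume configuration (assumed)
response = connector.execute('open', target='', init=True, tree=True, debug=1)
if 'error' in response:
    print(response['error'])
else:
    # with debug enabled, the response carries timing and volume diagnostics
    print(response['debug']['time'])
```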
scottrogowski/code2flow | 37e45ca4340289f8ceec79b3fe5131c401387c58 | code2flow/python.py | python | get_inherits | (tree) | return [base.id for base in tree.bases if type(base) == ast.Name] | Get what superclasses this class inherits
This handles exact names like 'MyClass' but skips things like 'cls' and 'mod.MyClass'
Resolving those would be difficult
:param tree ast:
:rtype: list[str] | Get what superclasses this class inherits
This handles exact names like 'MyClass' but skips things like 'cls' and 'mod.MyClass'
Resolving those would be difficult
:param tree ast:
:rtype: list[str] | [
"Get",
"what",
"superclasses",
"this",
"class",
"inherits",
"This",
"handles",
"exact",
"names",
"like",
"MyClass",
"but",
"skips",
"things",
"like",
"cls",
"and",
"mod",
".",
"MyClass",
"Resolving",
"those",
"would",
"be",
"difficult",
":",
"param",
"tree",
"ast",
":",
":",
"rtype",
":",
"list",
"[",
"str",
"]"
] | def get_inherits(tree):
"""
Get what superclasses this class inherits
This handles exact names like 'MyClass' but skips things like 'cls' and 'mod.MyClass'
Resolving those would be difficult
:param tree ast:
:rtype: list[str]
"""
return [base.id for base in tree.bases if type(base) == ast.Name] | [
"def",
"get_inherits",
"(",
"tree",
")",
":",
"return",
"[",
"base",
".",
"id",
"for",
"base",
"in",
"tree",
".",
"bases",
"if",
"type",
"(",
"base",
")",
"==",
"ast",
".",
"Name",
"]"
] | https://github.com/scottrogowski/code2flow/blob/37e45ca4340289f8ceec79b3fe5131c401387c58/code2flow/python.py#L132-L140 |
|
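A runnable illustration of the name-only rule in `get_inherits`: plain base-class names are kept, while dotted bases (`ast.Attribute` nodes) are skipped.

```python
import ast

source = """
class Base: pass
class Mixed(Base, pkg.Remote): pass
"""
for node in ast.walk(ast.parse(source)):
    if isinstance(node, ast.ClassDef):
        print(node.name, [b.id for b in node.bases if type(b) == ast.Name])
# Base []
# Mixed ['Base']   <- pkg.Remote is an ast.Attribute, so it is dropped
```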
Opentrons/opentrons | 466e0567065d8773a81c25cd1b5c7998e00adf2c | api/src/opentrons/protocols/context/protocol_api/instrument_context.py | python | InstrumentContextImplementation.delay | (self) | Delay protocol execution. | Delay protocol execution. | [
"Delay",
"protocol",
"execution",
"."
] | def delay(self) -> None:
"""Delay protocol execution."""
self._protocol_interface.delay(seconds=0, msg=None) | [
"def",
"delay",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_protocol_interface",
".",
"delay",
"(",
"seconds",
"=",
"0",
",",
"msg",
"=",
"None",
")"
] | https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/src/opentrons/protocols/context/protocol_api/instrument_context.py#L140-L142 |
||
ElasticHQ/elasticsearch-HQ | 8197e21d09b1312492dcb6998a2349d73b06efc6 | elastichq/vendor/elasticsearch/client/cluster.py | python | ClusterClient.reroute | (self, body=None, params=None) | return self.transport.perform_request('POST', '/_cluster/reroute',
params=params, body=body) | Explicitly execute a cluster reroute allocation command including specific commands.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-reroute.html>`_
:arg body: The definition of `commands` to perform (`move`, `cancel`,
`allocate`)
:arg dry_run: Simulate the operation only and return the resulting state
:arg explain: Return an explanation of why the commands can or cannot be
executed
:arg master_timeout: Explicit operation timeout for connection to master
node
:arg metric: Limit the information returned to the specified metrics.
Defaults to all but metadata, valid choices are: '_all', 'blocks',
'metadata', 'nodes', 'routing_table', 'master_node', 'version'
:arg retry_failed: Retries allocation of shards that are blocked due to
too many subsequent allocation failures
:arg timeout: Explicit operation timeout | Explicitly execute a cluster reroute allocation command including specific commands.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-reroute.html>`_ | [
"Explicitly",
"execute",
"a",
"cluster",
"reroute",
"allocation",
"command",
"including",
"specific",
"commands",
".",
"<http",
":",
"//",
"www",
".",
"elastic",
".",
"co",
"/",
"guide",
"/",
"en",
"/",
"elasticsearch",
"/",
"reference",
"/",
"current",
"/",
"cluster",
"-",
"reroute",
".",
"html",
">",
"_"
] | def reroute(self, body=None, params=None):
"""
Explicitly execute a cluster reroute allocation command including specific commands.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-reroute.html>`_
:arg body: The definition of `commands` to perform (`move`, `cancel`,
`allocate`)
:arg dry_run: Simulate the operation only and return the resulting state
:arg explain: Return an explanation of why the commands can or cannot be
executed
:arg master_timeout: Explicit operation timeout for connection to master
node
:arg metric: Limit the information returned to the specified metrics.
Defaults to all but metadata, valid choices are: '_all', 'blocks',
'metadata', 'nodes', 'routing_table', 'master_node', 'version'
:arg retry_failed: Retries allocation of shards that are blocked due to
too many subsequent allocation failures
:arg timeout: Explicit operation timeout
"""
return self.transport.perform_request('POST', '/_cluster/reroute',
params=params, body=body) | [
"def",
"reroute",
"(",
"self",
",",
"body",
"=",
"None",
",",
"params",
"=",
"None",
")",
":",
"return",
"self",
".",
"transport",
".",
"perform_request",
"(",
"'POST'",
",",
"'/_cluster/reroute'",
",",
"params",
"=",
"params",
",",
"body",
"=",
"body",
")"
] | https://github.com/ElasticHQ/elasticsearch-HQ/blob/8197e21d09b1312492dcb6998a2349d73b06efc6/elastichq/vendor/elasticsearch/client/cluster.py#L100-L120 |
|
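A hedged sketch of calling `reroute` through the same client; the index and node names are placeholders, and the `commands` body follows the Elasticsearch cluster-reroute API.

```python
from elasticsearch import Elasticsearch

es = Elasticsearch()
body = {
    "commands": [
        {"move": {"index": "logs-2018.01", "shard": 0,
                  "from_node": "node-1", "to_node": "node-2"}}
    ]
}
# dry_run simulates the command and returns the resulting cluster state
# instead of applying it.
print(es.cluster.reroute(body=body, dry_run=True))
```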
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | deps/jscshim/webkit/Source/JavaScriptCore/disassembler/udis86/ud_opcode.py | python | UdInsnDef.lookupPrefix | (self, pfx) | return True if pfx in self.prefixes else None | Lookup prefix (if any, None otherwise), by name | Lookup prefix (if any, None otherwise), by name | [
"Lookup",
"prefix",
"(",
"if",
"any",
"None",
"otherwise",
")",
"by",
"name"
] | def lookupPrefix(self, pfx):
"""Lookup prefix (if any, None otherwise), by name"""
return True if pfx in self.prefixes else None | [
"def",
"lookupPrefix",
"(",
"self",
",",
"pfx",
")",
":",
"return",
"True",
"if",
"pfx",
"in",
"self",
".",
"prefixes",
"else",
"None"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/jscshim/webkit/Source/JavaScriptCore/disassembler/udis86/ud_opcode.py#L51-L53 |
|
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | odoo/models.py | python | BaseModel.__or__ | (self, other) | return self.union(other) | Return the union of two recordsets.
Note that first occurrence order is preserved. | Return the union of two recordsets.
Note that first occurrence order is preserved. | [
"Return",
"the",
"union",
"of",
"two",
"recordsets",
".",
"Note",
"that",
"first",
"occurrence",
"order",
"is",
"preserved",
"."
] | def __or__(self, other):
""" Return the union of two recordsets.
Note that first occurrence order is preserved.
"""
return self.union(other) | [
"def",
"__or__",
"(",
"self",
",",
"other",
")",
":",
"return",
"self",
".",
"union",
"(",
"other",
")"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/odoo/models.py#L5799-L5803 |
|
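A sketch of the operator in use inside an Odoo environment; `env` is assumed and the record ids are placeholders.

```python
a = env['res.partner'].browse([1, 2])
b = env['res.partner'].browse([2, 3])
union = a | b        # BaseModel.__or__ delegates to self.union(other)
print(union.ids)     # [1, 2, 3]: duplicates collapse, first occurrence wins
```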
francescou/docker-compose-ui | 8fc85c53afbbcc1fc121c7de4f2e07003fa2dfde | main.py | python | get_web_console_pattern | () | return jsonify(web_console_pattern=os.getenv('WEB_CONSOLE_PATTERN')) | forward WEB_CONSOLE_PATTERN env var from server to spa | forward WEB_CONSOLE_PATTERN env var from server to spa | [
"forward",
"WEB_CONSOLE_PATTERN",
"env",
"var",
"from",
"server",
"to",
"spa"
] | def get_web_console_pattern():
"""
forward WEB_CONSOLE_PATTERN env var from server to spa
"""
return jsonify(web_console_pattern=os.getenv('WEB_CONSOLE_PATTERN')) | [
"def",
"get_web_console_pattern",
"(",
")",
":",
"return",
"jsonify",
"(",
"web_console_pattern",
"=",
"os",
".",
"getenv",
"(",
"'WEB_CONSOLE_PATTERN'",
")",
")"
] | https://github.com/francescou/docker-compose-ui/blob/8fc85c53afbbcc1fc121c7de4f2e07003fa2dfde/main.py#L404-L408 |
|
depjs/dep | cb8def92812d80b1fd8e5ffbbc1ae129a207fff6 | node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings._GetStandaloneBinaryPath | (self) | return target_prefix + target + target_ext | Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles. | Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles. | [
"Returns",
"the",
"name",
"of",
"the",
"non",
"-",
"bundle",
"binary",
"represented",
"by",
"this",
"target",
".",
"E",
".",
"g",
".",
"hello_world",
".",
"Only",
"valid",
"for",
"non",
"-",
"bundles",
"."
] | def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec["type"] in (
"executable",
"shared_library",
"static_library",
"loadable_module",
), ("Unexpected type %s" % self.spec["type"])
target = self.spec["target_name"]
if self.spec["type"] == "static_library":
if target[:3] == "lib":
target = target[3:]
elif self.spec["type"] in ("loadable_module", "shared_library"):
if target[:3] == "lib":
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get("product_name", target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext | [
"def",
"_GetStandaloneBinaryPath",
"(",
"self",
")",
":",
"assert",
"not",
"self",
".",
"_IsBundle",
"(",
")",
"assert",
"self",
".",
"spec",
"[",
"\"type\"",
"]",
"in",
"(",
"\"executable\"",
",",
"\"shared_library\"",
",",
"\"static_library\"",
",",
"\"loadable_module\"",
",",
")",
",",
"(",
"\"Unexpected type %s\"",
"%",
"self",
".",
"spec",
"[",
"\"type\"",
"]",
")",
"target",
"=",
"self",
".",
"spec",
"[",
"\"target_name\"",
"]",
"if",
"self",
".",
"spec",
"[",
"\"type\"",
"]",
"==",
"\"static_library\"",
":",
"if",
"target",
"[",
":",
"3",
"]",
"==",
"\"lib\"",
":",
"target",
"=",
"target",
"[",
"3",
":",
"]",
"elif",
"self",
".",
"spec",
"[",
"\"type\"",
"]",
"in",
"(",
"\"loadable_module\"",
",",
"\"shared_library\"",
")",
":",
"if",
"target",
"[",
":",
"3",
"]",
"==",
"\"lib\"",
":",
"target",
"=",
"target",
"[",
"3",
":",
"]",
"target_prefix",
"=",
"self",
".",
"_GetStandaloneExecutablePrefix",
"(",
")",
"target",
"=",
"self",
".",
"spec",
".",
"get",
"(",
"\"product_name\"",
",",
"target",
")",
"target_ext",
"=",
"self",
".",
"_GetStandaloneExecutableSuffix",
"(",
")",
"return",
"target_prefix",
"+",
"target",
"+",
"target_ext"
] | https://github.com/depjs/dep/blob/cb8def92812d80b1fd8e5ffbbc1ae129a207fff6/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L473-L494 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/reloop-closured/lib/python2.7/SocketServer.py | python | TCPServer.__init__ | (self, server_address, RequestHandlerClass, bind_and_activate=True) | Constructor. May be extended, do not override. | Constructor. May be extended, do not override. | [
"Constructor",
".",
"May",
"be",
"extended",
"do",
"not",
"override",
"."
] | def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
"""Constructor. May be extended, do not override."""
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.socket = socket.socket(self.address_family,
self.socket_type)
if bind_and_activate:
self.server_bind()
self.server_activate() | [
"def",
"__init__",
"(",
"self",
",",
"server_address",
",",
"RequestHandlerClass",
",",
"bind_and_activate",
"=",
"True",
")",
":",
"BaseServer",
".",
"__init__",
"(",
"self",
",",
"server_address",
",",
"RequestHandlerClass",
")",
"self",
".",
"socket",
"=",
"socket",
".",
"socket",
"(",
"self",
".",
"address_family",
",",
"self",
".",
"socket_type",
")",
"if",
"bind_and_activate",
":",
"self",
".",
"server_bind",
"(",
")",
"self",
".",
"server_activate",
"(",
")"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/reloop-closured/lib/python2.7/SocketServer.py#L402-L409 |
||
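Typical use of this constructor, shown with Python 3's `socketserver` (the record above is the vendored Python 2.7 `SocketServer` copy of the same class):

```python
import socketserver

class EchoHandler(socketserver.StreamRequestHandler):
    def handle(self):
        # echo one line back to the client
        self.wfile.write(self.rfile.readline())

# bind_and_activate=True (the default) binds and listens immediately; pass
# False to set socket options before calling server_bind()/server_activate().
with socketserver.TCPServer(('127.0.0.1', 9999), EchoHandler) as server:
    server.serve_forever()
```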
GoogleCloudPlatform/PerfKitExplorer | 9efa61015d50c25f6d753f0212ad3bf16876d496 | third_party/py/apiclient/http.py | python | MediaDownloadProgress.progress | (self) | Percent of download completed, as a float.
Returns:
the percentage complete as a float, returning 0.0 if the total size of
the download is unknown. | Percent of download completed, as a float. | [
"Percent",
"of",
"download",
"completed",
"as",
"a",
"float",
"."
] | def progress(self):
"""Percent of download completed, as a float.
Returns:
the percentage complete as a float, returning 0.0 if the total size of
the download is unknown.
"""
if self.total_size is not None:
return float(self.resumable_progress) / float(self.total_size)
else:
return 0.0 | [
"def",
"progress",
"(",
"self",
")",
":",
"if",
"self",
".",
"total_size",
"is",
"not",
"None",
":",
"return",
"float",
"(",
"self",
".",
"resumable_progress",
")",
"/",
"float",
"(",
"self",
".",
"total_size",
")",
"else",
":",
"return",
"0.0"
] | https://github.com/GoogleCloudPlatform/PerfKitExplorer/blob/9efa61015d50c25f6d753f0212ad3bf16876d496/third_party/py/apiclient/http.py#L100-L110 |
||
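`progress()` is usually read inside the chunked-download loop. A hedged sketch, where `request` is a media-download `HttpRequest` built elsewhere with the same library:

```python
import io
from apiclient.http import MediaIoBaseDownload

buf = io.BytesIO()
downloader = MediaIoBaseDownload(buf, request)  # request: assumed media request
done = False
while not done:
    status, done = downloader.next_chunk()
    # status is a MediaDownloadProgress: 0.0 while the total size is
    # unknown, otherwise resumable_progress / total_size.
    print('downloaded %d%%' % int(status.progress() * 100))
```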
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py | python | _BreakpointContainer.__clear_buffer_watch | (self, bw) | Used by L{dont_watch_buffer} and L{dont_stalk_buffer}.
@type bw: L{BufferWatch}
@param bw: Buffer watch identifier. | Used by L{dont_watch_buffer} and L{dont_stalk_buffer}. | [
"Used",
"by",
"L",
"{",
"dont_watch_buffer",
"}",
"and",
"L",
"{",
"dont_stalk_buffer",
"}",
"."
] | def __clear_buffer_watch(self, bw):
"""
Used by L{dont_watch_buffer} and L{dont_stalk_buffer}.
@type bw: L{BufferWatch}
@param bw: Buffer watch identifier.
"""
# Get the PID and the start and end addresses of the buffer.
pid = bw.pid
start = bw.start
end = bw.end
# Get the base address and size in pages required for the buffer.
base = MemoryAddresses.align_address_to_page_start(start)
limit = MemoryAddresses.align_address_to_page_end(end)
pages = MemoryAddresses.get_buffer_size_in_pages(start, end - start)
# For each page, get the breakpoint and its condition object.
# For each condition, remove the buffer.
# For each breakpoint, if no buffers are on watch, erase it.
cset = set() # condition objects
page_addr = base
pageSize = MemoryAddresses.pageSize
while page_addr < limit:
if self.has_page_breakpoint(pid, page_addr):
bp = self.get_page_breakpoint(pid, page_addr)
condition = bp.get_condition()
if condition not in cset:
if not isinstance(condition, _BufferWatchCondition):
# this shouldn't happen unless you tinkered with it
# or defined your own page breakpoints manually.
continue
cset.add(condition)
condition.remove(bw)
if condition.count() == 0:
try:
self.erase_page_breakpoint(pid, bp.get_address())
except WindowsError:
msg = "Cannot remove page breakpoint at address %s"
msg = msg % HexDump.address( bp.get_address() )
warnings.warn(msg, BreakpointWarning)
page_addr = page_addr + pageSize | [
"def",
"__clear_buffer_watch",
"(",
"self",
",",
"bw",
")",
":",
"# Get the PID and the start and end addresses of the buffer.",
"pid",
"=",
"bw",
".",
"pid",
"start",
"=",
"bw",
".",
"start",
"end",
"=",
"bw",
".",
"end",
"# Get the base address and size in pages required for the buffer.",
"base",
"=",
"MemoryAddresses",
".",
"align_address_to_page_start",
"(",
"start",
")",
"limit",
"=",
"MemoryAddresses",
".",
"align_address_to_page_end",
"(",
"end",
")",
"pages",
"=",
"MemoryAddresses",
".",
"get_buffer_size_in_pages",
"(",
"start",
",",
"end",
"-",
"start",
")",
"# For each page, get the breakpoint and it's condition object.",
"# For each condition, remove the buffer.",
"# For each breakpoint, if no buffers are on watch, erase it.",
"cset",
"=",
"set",
"(",
")",
"# condition objects",
"page_addr",
"=",
"base",
"pageSize",
"=",
"MemoryAddresses",
".",
"pageSize",
"while",
"page_addr",
"<",
"limit",
":",
"if",
"self",
".",
"has_page_breakpoint",
"(",
"pid",
",",
"page_addr",
")",
":",
"bp",
"=",
"self",
".",
"get_page_breakpoint",
"(",
"pid",
",",
"page_addr",
")",
"condition",
"=",
"bp",
".",
"get_condition",
"(",
")",
"if",
"condition",
"not",
"in",
"cset",
":",
"if",
"not",
"isinstance",
"(",
"condition",
",",
"_BufferWatchCondition",
")",
":",
"# this shouldn't happen unless you tinkered with it",
"# or defined your own page breakpoints manually.",
"continue",
"cset",
".",
"add",
"(",
"condition",
")",
"condition",
".",
"remove",
"(",
"bw",
")",
"if",
"condition",
".",
"count",
"(",
")",
"==",
"0",
":",
"try",
":",
"self",
".",
"erase_page_breakpoint",
"(",
"pid",
",",
"bp",
".",
"get_address",
"(",
")",
")",
"except",
"WindowsError",
":",
"msg",
"=",
"\"Cannot remove page breakpoint at address %s\"",
"msg",
"=",
"msg",
"%",
"HexDump",
".",
"address",
"(",
"bp",
".",
"get_address",
"(",
")",
")",
"warnings",
".",
"warn",
"(",
"msg",
",",
"BreakpointWarning",
")",
"page_addr",
"=",
"page_addr",
"+",
"pageSize"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py#L4477-L4519 |
||
evilsocket/pwnagotchi | cd50cf74186b99b39b34ca953e3ce7c2bb14bfa6 | pwnagotchi/ui/hw/libs/waveshare/lcdhat/ST7789.py | python | ST7789.reset | (self) | Reset the display | Reset the display | [
"Reset",
"the",
"display"
] | def reset(self):
"""Reset the display"""
GPIO.output(self._rst, GPIO.HIGH)
time.sleep(0.01)
GPIO.output(self._rst, GPIO.LOW)
time.sleep(0.01)
GPIO.output(self._rst, GPIO.HIGH)
time.sleep(0.01) | [
"def",
"reset",
"(",
"self",
")",
":",
"GPIO",
".",
"output",
"(",
"self",
".",
"_rst",
",",
"GPIO",
".",
"HIGH",
")",
"time",
".",
"sleep",
"(",
"0.01",
")",
"GPIO",
".",
"output",
"(",
"self",
".",
"_rst",
",",
"GPIO",
".",
"LOW",
")",
"time",
".",
"sleep",
"(",
"0.01",
")",
"GPIO",
".",
"output",
"(",
"self",
".",
"_rst",
",",
"GPIO",
".",
"HIGH",
")",
"time",
".",
"sleep",
"(",
"0.01",
")"
] | https://github.com/evilsocket/pwnagotchi/blob/cd50cf74186b99b39b34ca953e3ce7c2bb14bfa6/pwnagotchi/ui/hw/libs/waveshare/lcdhat/ST7789.py#L117-L124 |
||
thinreports/thinreports-editor | c9c9d651eb088886ce5c531f2e3dd801de285cc0 | basic-editor/vendor/closure-library/closure/bin/calcdeps.py | python | FilterByExcludes | (options, files) | return [i for i in files if not i in excludesSet] | Filters the given files by the exclusions specified at the command line.
Args:
options: The flags to calcdeps.
files: The files to filter.
Returns:
A list of files. | Filters the given files by the exclusions specified at the command line. | [
"Filters",
"the",
"given",
"files",
"by",
"the",
"exlusions",
"specified",
"at",
"the",
"command",
"line",
"."
] | def FilterByExcludes(options, files):
"""Filters the given files by the exlusions specified at the command line.
Args:
options: The flags to calcdeps.
files: The files to filter.
Returns:
A list of files.
"""
excludes = []
if options.excludes:
excludes = ExpandDirectories(options.excludes)
excludesSet = set(excludes)
return [i for i in files if not i in excludesSet] | [
"def",
"FilterByExcludes",
"(",
"options",
",",
"files",
")",
":",
"excludes",
"=",
"[",
"]",
"if",
"options",
".",
"excludes",
":",
"excludes",
"=",
"ExpandDirectories",
"(",
"options",
".",
"excludes",
")",
"excludesSet",
"=",
"set",
"(",
"excludes",
")",
"return",
"[",
"i",
"for",
"i",
"in",
"files",
"if",
"not",
"i",
"in",
"excludesSet",
"]"
] | https://github.com/thinreports/thinreports-editor/blob/c9c9d651eb088886ce5c531f2e3dd801de285cc0/basic-editor/vendor/closure-library/closure/bin/calcdeps.py#L397-L411 |
|
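The filtering pattern in isolation: materialize the exclusions into a set once, so each membership test is O(1) instead of a list scan.

```python
files = ['/src/app.js', '/src/gen/a.js', '/src/util.js']
excludes_set = set(['/src/gen/a.js'])
print([f for f in files if f not in excludes_set])
# ['/src/app.js', '/src/util.js']
```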
openwisp/django-netjsonconfig | 55ab56245cf263f331c08e421c37c54ad44024cc | django_netjsonconfig/base/base.py | python | BaseConfig.checksum | (self) | return hashlib.md5(config).hexdigest() | returns checksum of configuration | returns checksum of configuration | [
"returns",
"checksum",
"of",
"configuration"
] | def checksum(self):
"""
returns checksum of configuration
"""
config = self.generate().getvalue()
return hashlib.md5(config).hexdigest() | [
"def",
"checksum",
"(",
"self",
")",
":",
"config",
"=",
"self",
".",
"generate",
"(",
")",
".",
"getvalue",
"(",
")",
"return",
"hashlib",
".",
"md5",
"(",
"config",
")",
".",
"hexdigest",
"(",
")"
] | https://github.com/openwisp/django-netjsonconfig/blob/55ab56245cf263f331c08e421c37c54ad44024cc/django_netjsonconfig/base/base.py#L192-L197 |
|
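The checksum recipe in isolation: hashing the rendered configuration bytes means two configurations compare equal exactly when their generated output is identical. The config bytes below are illustrative.

```python
import hashlib

config = b"package network\n\nconfig interface 'lan'\n    option proto 'dhcp'\n"
print(hashlib.md5(config).hexdigest())
```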
ireaderlab/zkdash | a9e27e9cc63dcfbb483a1fdfa00c98fd0b079739 | lib/db/database.py | python | Database.connect | (self) | | Establish the master/slave connection, retrying if the connection is closed | Establish the master/slave connection, retrying if the connection is closed | [
"主从建立连接",
"如果连接关闭重试"
] | def connect(self):
'''Establish the master/slave connection, retrying if the connection is closed
'''
i = 0
while i < 4:
try:
if self.database.is_closed():
self.database.get_conn().ping(True)
break
except OperationalError:
self.close()
i = i + 1 | [
"def",
"connect",
"(",
"self",
")",
":",
"i",
"=",
"0",
"while",
"i",
"<",
"4",
":",
"try",
":",
"if",
"self",
".",
"database",
".",
"is_closed",
"(",
")",
":",
"self",
".",
"database",
".",
"get_conn",
"(",
")",
".",
"ping",
"(",
"True",
")",
"break",
"except",
"OperationalError",
":",
"self",
".",
"close",
"(",
")",
"i",
"=",
"i",
"+",
"1"
] | https://github.com/ireaderlab/zkdash/blob/a9e27e9cc63dcfbb483a1fdfa00c98fd0b079739/lib/db/database.py#L74-L85 |
||
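The reconnect loop restated generically, assuming the raw connection exposes MySQLdb's `ping(reconnect=True)`; the helper name and the exception's import path are assumptions, not taken from the record.

```python
from MySQLdb import OperationalError  # assumption: MySQLdb backs the pool

def ensure_connected(database, retries=4):
    for _ in range(retries):
        try:
            if database.is_closed():
                database.get_conn().ping(True)  # reconnect if the link died
            return True
        except OperationalError:
            database.close()  # drop the dead connection and try again
    return False
```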
OWASP/SecureTea-Project | ae55082d4a342f10099db4dead23267a517e1a66 | securetea/lib/antivirus/antivirus_logger.py | python | AntiVirusLogger.printinfo | (self, message) | Over-ride the parent class printinfo method.
Args:
message (str): Message to log
Raises:
None
Returns:
None | Over-ride the parent class printinfo method. | [
"Over",
"-",
"ride",
"the",
"parent",
"class",
"printinfo",
"method",
"."
] | def printinfo(self, message):
"""
Over-ride the parent class printinfo method.
Args:
message (str): Message to log
Raises:
None
Returns:
None
"""
# Call the parent method
super().printinfo(message)
self.write_data(message) | [
"def",
"printinfo",
"(",
"self",
",",
"message",
")",
":",
"# Call the parent method",
"super",
"(",
")",
".",
"printinfo",
"(",
"message",
")",
"self",
".",
"write_data",
"(",
"message",
")"
] | https://github.com/OWASP/SecureTea-Project/blob/ae55082d4a342f10099db4dead23267a517e1a66/securetea/lib/antivirus/antivirus_logger.py#L60-L75 |
||
almonk/Bind | 03e9e98fb8b30a58cb4fc2829f06289fa9958897 | public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | python | VisualStudioVersion.ProjectExtension | (self) | return self.uses_vcxproj and '.vcxproj' or '.vcproj' | Returns the file extension for the project. | Returns the file extension for the project. | [
"Returns",
"the",
"file",
"extension",
"for",
"the",
"project",
"."
] | def ProjectExtension(self):
"""Returns the file extension for the project."""
return self.uses_vcxproj and '.vcxproj' or '.vcproj' | [
"def",
"ProjectExtension",
"(",
"self",
")",
":",
"return",
"self",
".",
"uses_vcxproj",
"and",
"'.vcxproj'",
"or",
"'.vcproj'"
] | https://github.com/almonk/Bind/blob/03e9e98fb8b30a58cb4fc2829f06289fa9958897/public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py#L54-L56 |
|
algorithmiaio/sample-apps | a5c90698c09c61febcd03d922d47dc437a1f4cdc | recipes/color-extraction/color_extraction_recipe.py | python | get_image | (url) | Retrieve images from site. | Retrieve images from site. | [
"Retrieve",
"images",
"from",
"site",
"."
] | def get_image(url):
"""Retrieve images from site."""
algo = client.algo("diego/Getimagelinks/0.1.0")
if url.startswith("http:") or url.startswith("https:"):
try:
response = algo.pipe(url).result
print(response)
return response
except Algorithmia.algo_response.AlgoException as e:
print(e)
else:
raise Exception("Please pass in a valid url") | [
"def",
"get_image",
"(",
"url",
")",
":",
"algo",
"=",
"client",
".",
"algo",
"(",
"\"diego/Getimagelinks/0.1.0\"",
")",
"if",
"url",
".",
"startswith",
"(",
"\"http:\"",
")",
"or",
"url",
".",
"startswith",
"(",
"\"https:\"",
")",
":",
"try",
":",
"response",
"=",
"algo",
".",
"pipe",
"(",
"url",
")",
".",
"result",
"print",
"(",
"response",
")",
"return",
"response",
"except",
"Algorithmia",
".",
"algo_response",
".",
"AlgoException",
"as",
"e",
":",
"print",
"(",
"e",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Please pass in a valid url\"",
")"
] | https://github.com/algorithmiaio/sample-apps/blob/a5c90698c09c61febcd03d922d47dc437a1f4cdc/recipes/color-extraction/color_extraction_recipe.py#L8-L19 |
||
SEL-Columbia/formhub | 578fc2c5e9febe8dc68b37f7d2e85a76dc2c4c04 | utils/model_tools.py | python | queryset_iterator | (queryset, chunksize=100) | Iterate over a Django Queryset.
This method loads a maximum of chunksize (default: 100) rows in
its memory at the same time while django normally would load all
rows in its memory. Using the iterator() method only causes it to
not preload all the classes. | Iterate over a Django Queryset. | [
"Iterate",
"over",
"a",
"Django",
"Queryset",
"."
] | def queryset_iterator(queryset, chunksize=100):
'''''
Iterate over a Django Queryset.
This method loads a maximum of chunksize (default: 100) rows in
its memory at the same time while django normally would load all
rows in its memory. Using the iterator() method only causes it to
not preload all the classes.
'''
start = 0
end = chunksize
while start < queryset.count():
for row in queryset[start:end]:
yield row
start += chunksize
end += chunksize
gc.collect() | [
"def",
"queryset_iterator",
"(",
"queryset",
",",
"chunksize",
"=",
"100",
")",
":",
"start",
"=",
"0",
"end",
"=",
"chunksize",
"while",
"start",
"<",
"queryset",
".",
"count",
"(",
")",
":",
"for",
"row",
"in",
"queryset",
"[",
"start",
":",
"end",
"]",
":",
"yield",
"row",
"start",
"+=",
"chunksize",
"end",
"+=",
"chunksize",
"gc",
".",
"collect",
"(",
")"
] | https://github.com/SEL-Columbia/formhub/blob/578fc2c5e9febe8dc68b37f7d2e85a76dc2c4c04/utils/model_tools.py#L17-L33 |
||
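Usage sketch; `Submission` and `handle` are hypothetical. Because each chunk re-queries via a fresh slice, rows inserted or deleted mid-iteration can shift the windows, so iterate over a stable ordering where that matters.

```python
for row in queryset_iterator(Submission.objects.order_by('pk'), chunksize=500):
    handle(row)  # per-row processing
```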
ppetrid/yawd-admin | c010bb2bf1e0dda3d1f6491f76cf278ee113aaa9 | yawdadmin/sites.py | python | YawdAdminSite.register_options | (self, optionset_admin) | Allows an application to register admin options like so::
admin_site.register_options(OptionSetAdminClass) | Allows an application to register admin options like so:: | [
"Allows",
"an",
"application",
"to",
"register",
"admin",
"options",
"like",
"so",
"::"
] | def register_options(self, optionset_admin):
"""
Allows an application to register admin options like so::
admin_site.register_options(OptionSetAdminClass)
"""
global _optionset_labels
if not optionset_admin.optionset_label in _optionset_labels:
#Add admin optionset to the registry
_optionset_labels[optionset_admin.optionset_label] = optionset_admin
#Initialize options
optionset_admin() | [
"def",
"register_options",
"(",
"self",
",",
"optionset_admin",
")",
":",
"global",
"_optionset_labels",
"if",
"not",
"optionset_admin",
".",
"optionset_label",
"in",
"_optionset_labels",
":",
"#Add admin optionset to the registry",
"_optionset_labels",
"[",
"optionset_admin",
".",
"optionset_label",
"]",
"=",
"optionset_admin",
"#Initialize options",
"optionset_admin",
"(",
")"
] | https://github.com/ppetrid/yawd-admin/blob/c010bb2bf1e0dda3d1f6491f76cf278ee113aaa9/yawdadmin/sites.py#L326-L337 |
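A hedged sketch of the registration the docstring shows; the `OptionSetAdmin` subclass, its label, and the import paths are illustrative assumptions.

```python
from yawdadmin import admin_site
from yawdadmin.admin_options import OptionSetAdmin  # assumed import path

class SiteOptions(OptionSetAdmin):
    optionset_label = 'site-options'

admin_site.register_options(SiteOptions)
```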