complexity
int64
1
56
n_identifiers
int64
1
114
code
stringlengths
19
12.7k
path
stringlengths
8
134
n_ast_nodes
int64
12
2.35k
ast_errors
stringlengths
0
4.01k
repo
stringlengths
3
28
documentation
dict
n_words
int64
2
866
language
stringclasses
1 value
vocab_size
int64
2
323
commit_id
stringlengths
40
40
file_name
stringlengths
5
79
id
int64
243
338k
nloc
int64
1
228
token_counts
int64
5
1.4k
fun_name
stringlengths
1
77
url
stringlengths
31
60
commit_message
stringlengths
3
15.3k
n_whitespaces
int64
1
3.23k
n_ast_errors
int64
0
20
d_id
int64
74
121k
ast_levels
int64
4
29
9
23
def quantize(self, colors=256, method=None, kmeans=0, palette=None, dither=1): self.load() if method is None: # defaults: method = Quantize.MEDIANCUT if self.mode == "RGBA": method = Quantize.FASTOCTREE if self.mode == "RGBA" and me...
src/PIL/Image.py
367
Pillow
{ "docstring": "\n Convert the image to 'P' mode with the specified number\n of colors.\n\n :param colors: The desired number of colors, <= 256\n :param method: :data:`Quantize.MEDIANCUT` (median cut),\n :data:`Quantize.MAXCOVERAGE` (maximum coverage),\n ...
145
Python
102
f8e4e9c2dd94c6f4789639dd891b8a6d5fb16e14
Image.py
242,236
32
222
quantize
https://github.com/python-pillow/Pillow.git
Added enums
514
0
69,798
12
1
18
def dummy_inputs(self) -> Dict[str, tf.Tensor]: VISION_DUMMY_INPUTS = tf.random.uniform(shape=(3, self.config.num_channels, 224, 224), dtype=tf.float32) return {"pixel_values": tf.constant(VISION_DUMMY_INPUTS)} RESNET_START_DOCSTRING = r RESNET_INPUTS_DOCSTRING = r @keras_serializable
src/transformers/models/resnet/modeling_tf_resnet.py
104
@keras_serializable
transformers
{ "docstring": "\n Dummy inputs to build the network. Returns:\n `Dict[str, tf.Tensor]`: The dummy inputs.\n \n This model is a TensorFlow\n [tf.keras.layers.Layer](https://www.tensorflow.org/api_docs/python/tf/keras/layers/Layer) sub-class. Use it as a\n regular TensorFlow Module an...
22
Python
19
77ea5130a1cd7de36796cc4d1bae6f21094d8863
modeling_tf_resnet.py
32,050
7
56
dummy_inputs
https://github.com/huggingface/transformers.git
Add TF ResNet model (#17427) * Rought TF conversion outline * Tidy up * Fix padding differences between layers * Add back embedder - whoops * Match test file to main * Match upstream test file * Correctly pass and assign image_size parameter Co-authored-by: Sayak Paul <spsayakpaul@gmail.com> * ...
40
1
5,843
12
2
14
def __reduce__(self) -> Tuple[Any, Any, Any]: pickled_state = super(FeatureArray, self).__reduce__() if isinstance(pickled_state, str): raise TypeError("np array __reduce__ returned string instead of tuple.") new_state = pickled_state[2] + ( self.number_of_dimens...
rasa/utils/tensorflow/model_data.py
110
rasa
{ "docstring": "Needed in order to pickle this object.\n\n Returns:\n A tuple.\n ", "language": "en", "n_whitespaces": 35, "n_words": 10, "vocab_size": 10 }
35
Python
33
4cdceaab5271a5b51463ec562c8eb55f96b771c5
model_data.py
159,676
15
73
__reduce__
https://github.com/RasaHQ/rasa.git
Bump numpy from 1.19.5 to 1.21.6 (#11078) * Bump numpy from 1.19.5 to 1.21.6 Bumps [numpy](https://github.com/numpy/numpy) from 1.19.5 to 1.21.6. - [Release notes](https://github.com/numpy/numpy/releases) - [Changelog](https://github.com/numpy/numpy/blob/main/doc/HOWTO_RELEASE.rst.txt) - [Commits](https://github...
121
0
38,392
10
1
17
def test_key_query_cancellation(self) -> None: self.register_user("alice", "wonderland") alice_token = self.login("alice", "wonderland") bob = self.register_user("bob", "uncle") channel = make_request_with_cancellation_test( "test_key_query_cancellation", ...
tests/rest/client/test_keys.py
177
synapse
{ "docstring": "\n Tests that /keys/query is cancellable and does not swallow the\n CancelledError.\n ", "language": "en", "n_whitespaces": 33, "n_words": 11, "vocab_size": 11 }
47
Python
42
d3d9ca156e323fe194b1bcb1af1628f65a2f3c1c
test_keys.py
249,472
23
104
test_key_query_cancellation
https://github.com/matrix-org/synapse.git
Cancel the processing of key query requests when they time out. (#13680)
259
0
72,939
13
2
3
def test_normal_operation(self, ray_instance):
python/ray/serve/tests/test_standalone2.py
15
ray
{ "docstring": "Checks that a moderate timeout doesn't affect normal operation.", "language": "en", "n_whitespaces": 8, "n_words": 9, "vocab_size": 9 }
3
Python
3
700618d0dbd27282ce72441d22a0000075b7a54f
test_standalone2.py
135,946
7
52
test_normal_operation
https://github.com/ray-project/ray.git
[Serve] Add the `SERVE_REQUEST_PROCESSING_TIMEOUT_S` environment variable (#29534)
10
0
30,778
6
1
8
async def test_slug(hass, caplog): result = slug(hass, "http://127.0.0.2/testurl/{{1/0}}") assert result is None assert "Syntax error in" in caplog.text @respx.mock
tests/components/generic/test_config_flow.py
54
@respx.mock
core
{ "docstring": "\n Test that the slug function generates an error in case of invalid template.\n\n Other paths in the slug function are already tested by other tests.\n ", "language": "en", "n_whitespaces": 35, "n_words": 25, "vocab_size": 21 }
19
Python
17
e64336cb91d1ce97ac82c57e98477acedfcbcf71
test_config_flow.py
314,918
4
26
test_slug
https://github.com/home-assistant/core.git
Allow configuring username and password in generic camera config flow (#73804) * Add ability to use user & pw not in stream url * Increase test coverage to 100% * Increase test coverage * Verify that stream source includes user:pass * Code review: refactor test to use MockConfigEntry * Code review: Impr...
30
1
113,519
9
8
9
def _get_text_feature_max_length(config, training_set_metadata) -> int: max_length = 0 for feature in config["input_features"]: if feature["type"] == TEXT: feature_max_len = training_set_metadata[feature["name"]]["word_max_sequence_length"] if feature_max_len > max_length: ...
ludwig/automl/auto_tune_config.py
190
ludwig
{ "docstring": "Returns max sequence length over text features, subject to preprocessing limit.", "language": "en", "n_whitespaces": 10, "n_words": 11, "vocab_size": 11 }
72
Python
45
d77aaf8da39f04a353a3a08fb699ae8a96ffea3a
auto_tune_config.py
6,396
19
110
_get_text_feature_max_length
https://github.com/ludwig-ai/ludwig.git
Improve AutoML heuristics for text classification (#1815) * Improve AutoML heuristics for text classification Co-authored-by: Anne Holler <anne@vmware.com>
184
0
970
14
3
8
async def wait(self) -> None: if self._is_set: return if not self._loop: self._loop = get_running_loop() self._event = asyncio.Event() await self._event.wait()
src/prefect/_internal/concurrency/primitives.py
78
prefect
{ "docstring": "\n Wait until the flag has been set.\n\n If the flag has already been set when this method is called, it returns immediately.\n ", "language": "en", "n_whitespaces": 44, "n_words": 22, "vocab_size": 18 }
19
Python
17
a368874d1b145c1ec5201e5efd3c26ce7c1e8611
primitives.py
60,126
12
44
wait
https://github.com/PrefectHQ/prefect.git
Add thread-safe async primitives `Event` and `Future` (#7865) Co-authored-by: Serina Grill <42048900+serinamarie@users.noreply.github.com>
80
0
11,991
10
3
22
def generic_parser(parse_func, *cols) -> np.ndarray: warnings.warn( "Use pd.to_datetime instead.", FutureWarning, stacklevel=find_stack_level(inspect.currentframe()), ) N = _check_columns(cols) results = np.empty(N, dtype=object) for i in range(N): args = [c[i...
pandas/io/date_converters.py
131
pandas
{ "docstring": "\n Use dateparser to parse columns with data information into a single datetime column.\n\n .. deprecated:: 1.2\n ", "language": "en", "n_whitespaces": 26, "n_words": 16, "vocab_size": 16 }
35
Python
29
6787b8b73f4c54a0cf742a90433e6fb6c7edb231
date_converters.py
168,932
17
83
generic_parser
https://github.com/pandas-dev/pandas.git
TST: Address/catch more test warnings (#48358)
91
0
40,344
12
1
2
def computed(self): return self["computed"]
packages/python/plotly/plotly/graph_objs/_layout.py
22
plotly.py
{ "docstring": "\n Placeholder for exporting automargin-impacting values namely\n `margin.t`, `margin.b`, `margin.l` and `margin.r` in \"full-\n json\" mode.\n\n The 'computed' property accepts values of any type\n\n Returns\n -------\n Any\n ", "language": "e...
4
Python
4
43e3a4011080911901176aab919c0ecf5046ddd3
_layout.py
227,377
2
11
computed
https://github.com/plotly/plotly.py.git
switch to black .22
18
0
59,050
7
3
13
def profile(event_type, extra_data=None): if not PROFILING_ENABLED: return NULL_LOG_SPAN worker = ray.worker.global_worker if worker.mode == ray.worker.LOCAL_MODE: return NULL_LOG_SPAN return worker.core_worker.profile_event(event_type.encode("ascii"), extra_data)
python/ray/_private/profiling.py
85
ray
{ "docstring": "Profile a span of time so that it appears in the timeline visualization.\n\n Note that this only works in the raylet code path.\n\n This function can be used as follows (both on the driver or within a task).\n\n .. code-block:: python\n import ray._private.profiling as profiling\n\n ...
20
Python
16
7f1bacc7dc9caf6d0ec042e39499bbf1d9a7d065
profiling.py
130,162
7
52
profile
https://github.com/ray-project/ray.git
[CI] Format Python code with Black (#21975) See #21316 and #21311 for the motivation behind these changes.
49
0
29,129
10
9
19
def mathieu_even_coef(m, q): r if not (isscalar(m) and isscalar(q)): raise ValueError("m and q must be scalars.") if (q < 0): raise ValueError("q >=0") if (m != floor(m)) or (m < 0): raise ValueError("m must be an integer >=0.") if (q <= 1): qm = 7.5 + 56.1*sqrt(q) -...
scipy/special/_basic.py
304
scipy
{ "docstring": "Fourier coefficients for even Mathieu and modified Mathieu functions.\n\n The Fourier series of the even solutions of the Mathieu differential\n equation are of the form\n\n .. math:: \\mathrm{ce}_{2n}(z, q) = \\sum_{k=0}^{\\infty} A_{(2n)}^{(2k)} \\cos 2kz\n\n .. math:: \\mathrm{ce}_{2n+1...
101
Python
70
4871f3d1c61bdb296ae03e3480f5f584f5c67256
_basic.py
241,808
55
205
mathieu_even_coef
https://github.com/scipy/scipy.git
MAINT: optimize, special, signal: Use custom warnings instead of print statements (#15259) Co-authored-by: Pamphile Roy <roy.pamphile@gmail.com> Co-authored-by: Tirth Patel <tirthasheshpatel@gmail.com>
191
0
69,704
15
3
18
def prefetch_renditions(self, *filters): # Get a list of filter spec strings. The given value could contain Filter objects filter_specs = [ filter.spec if isinstance(filter, Filter) else filter for filter in filters ] rendition_model = self.model.get_rendition_model...
wagtail/images/models.py
109
wagtail
{ "docstring": "\n Prefetches generated renditions for the given filters.\n ", "language": "en", "n_whitespaces": 22, "n_words": 7, "vocab_size": 7 }
43
Python
39
52ace9eae7311fa708dd19a7d6b6cabfb36a8fee
models.py
77,627
12
68
prefetch_renditions
https://github.com/wagtail/wagtail.git
Add prefetch_renditions method on Image queryset manager Update logic when creating and looking for a rendtion
170
0
16,680
13
1
11
def test_dataset_shard_with_loader_fn(self): dset = ray.data.range(100) config = {"input": "dataset", "input_config": {"loader_fn": lambda: dset}} ret_dataset, _ = get_dataset_and_shards(config) assert ret_dataset.count() == dset.count()
rllib/offline/tests/test_dataset_reader.py
95
ray
{ "docstring": "Tests whether the dataset_shard function works correctly with loader_fn.", "language": "en", "n_whitespaces": 8, "n_words": 9, "vocab_size": 9 }
21
Python
19
569fe0109629048d08e1d9e023f7769f10bd2244
test_dataset_reader.py
125,008
5
53
test_dataset_shard_with_loader_fn
https://github.com/ray-project/ray.git
[RLlib] improved unittests for dataset_reader and fixed bugs (#26458)
56
0
27,747
11
3
9
def _preprocess_conv3d_input(x, data_format): tf_data_format = "NDHWC" if data_format == "channels_first": if not _has_nchw_support(): x = tf.compat.v1.transpose(x, (0, 2, 3, 4, 1)) else: tf_data_format = "NCDHW" return x, tf_data_format
keras/backend.py
92
keras
{ "docstring": "Transpose and cast the input before the conv3d.\n\n Args:\n x: input tensor.\n data_format: string, `\"channels_last\"` or `\"channels_first\"`.\n\n Returns:\n A tensor.\n ", "language": "en", "n_whitespaces": 50, "n_words": 20, "vocab_size": 17 }
28
Python
23
84afc5193d38057e2e2badf9c889ea87d80d8fbf
backend.py
269,509
8
55
_preprocess_conv3d_input
https://github.com/keras-team/keras.git
Reformatting the codebase with black. PiperOrigin-RevId: 450093126
76
0
80,140
14
1
12
async def test_get_application_credentials(hass): test_1_integration = _get_test_integration(hass, "test_1", True) test_2_integration = _get_test_integration_with_application_credentials( hass, "test_2" ) with patch("homeassistant.loader.async_get_custom_components") as mock_get: m...
tests/test_loader.py
118
core
{ "docstring": "Verify that custom components with application_credentials are found.", "language": "en", "n_whitespaces": 7, "n_words": 8, "vocab_size": 8 }
39
Python
31
ae01ec02e28d4b83ef64636e36de2baf59c19874
test_loader.py
299,612
13
64
test_get_application_credentials
https://github.com/home-assistant/core.git
Allow custom integrations to support application_credentials platform (#71129)
118
0
98,529
11
2
12
def start_reaper_process(self): assert ( not self.kernel_fate_share ), "a reaper should not be used with kernel fate-sharing" process_info = ray._private.services.start_reaper(fate_share=False) assert ray_constants.PROCESS_TYPE_REAPER not in self.all_processes ...
python/ray/node.py
91
ray
{ "docstring": "\n Start the reaper process.\n\n This must be the first process spawned and should only be called when\n ray processes should be cleaned up if this process dies.\n ", "language": "en", "n_whitespaces": 56, "n_words": 27, "vocab_size": 22 }
34
Python
28
7f1bacc7dc9caf6d0ec042e39499bbf1d9a7d065
node.py
130,804
10
57
start_reaper_process
https://github.com/ray-project/ray.git
[CI] Format Python code with Black (#21975) See #21316 and #21311 for the motivation behind these changes.
124
0
29,376
10
1
11
def get_best_result(self) -> Tuple[int, Module, Dict[str, Dict[str, Tensor]], float, List[Dict]]: raise NotImplementedError()
nni/compression/pytorch/base/scheduler.py
51
nni
{ "docstring": "\n Returns\n -------\n Tuple[int, Module, Dict[str, Dict[str, Tensor]], float, List[Dict]]\n Return the task result that has the best performance,\n inculde task id, the compact model, the masks on the compact model, score and config list used in this task.\n...
12
Python
11
d68c786ff81bad19c04619d6a999ff34aaa724e7
scheduler.py
113,578
9
36
get_best_result
https://github.com/microsoft/nni.git
[Compression] remove pruning v1 & refactor directory (#5228)
26
0
24,963
7
1
5
def register(cls, function, *call_args, **call_kwds):
modin/core/dataframe/algebra/map.py
23
modin
{ "docstring": "\n Build Map operator that will be performed across each partition.\n\n Parameters\n ----------\n function : callable(pandas.DataFrame) -> pandas.DataFrame\n Function that will be applied to the each partition.\n Function takes `pandas.DataFrame` and r...
5
Python
5
a6f47c8e1c27d85fc09926bb35c2f1a65a6d3e79
map.py
154,457
3
18
register
https://github.com/modin-project/modin.git
REFACTOR-#4942: remove call method in favor of register due to duplication (#4943) Signed-off-by: Myachev <anatoly.myachev@intel.com>
12
0
35,990
6
1
17
def test_product_types_query_ids_not_exists(user_api_client, category): query = NOT_EXISTS_IDS_COLLECTIONS_QUERY variables = {"filter": {"ids": ["fTEJRuFHU6fd2RU=", "2XwnQNNhwCdEjhP="]}} response = user_api_client.post_graphql(query, variables) content = get_graphql_content(response, ignore_errors=True)...
saleor/graphql/product/tests/queries/test_product_types_query.py
234
@pytest.mark.parametrize( "search, expected_names", ( ("", ["The best juices", "The best beers", "The worst beers"]), ("best", ["The best juices", "The best beers"]), ("worst", ["The worst beers"]), ("average", []), ), )
saleor
{ "docstring": "\n query($filters: ProductTypeFilterInput) {\n productTypes(first: 10, filter: $filters) {\n edges {\n node {\n name\n }\n }\n }\n }\n", "language": "en", "n_whitespaces": 76, "n_words": 17, "vocab_size": 11 }
72
Python
52
d90be220d6b687d08153934a51354011a3cb5ca1
test_product_types_query.py
29,299
9
81
test_product_types_query_ids_not_exists
https://github.com/saleor/saleor.git
Split test_product.py and test_variant.py into multiple files (#11173) * Split test_product.py into multiple files * Split test_variant.py into multiple files
130
1
5,214
12
2
12
def script_args(f): args = [ magic_arguments.argument( '--out', type=str, help= ), magic_arguments.argument( '--err', type=str, help= ), magic_arguments.argument( '--bg', action="store_true", help= ...
IPython/core/magics/script.py
174
@magics_class
ipython
{ "docstring": "single decorator for adding script argsThe variable in which to store stdout from the script.\n If the script is backgrounded, this will be the stdout *pipe*,\n instead of the stderr text itself and will not be auto closed.\n The variable in which to store stderr from ...
42
Python
27
ce62a7a4b2c97bf8a30e8074e8fc18103a0718a0
script.py
208,424
39
101
script_args
https://github.com/ipython/ipython.git
avoid deprecated get_event_loop use our own `async_helpers.get_asyncio_loop` to track the global event loop script magics use dedicated background asyncio loop instead of trying to work on the main loop, which may or may not exist _AsyncIOProxy wraps background script objects to transfer awaitables across loops onl...
243
1
52,322
11
3
22
def _descendants_with_perm(self, user, action): # Get the permission object corresponding to this action permission = self._get_permission_objects_for_actions([action]).first() # Get the collections that have a GroupCollectionPermission record # for this permission and any of t...
wagtail/core/permission_policies/collections.py
239
wagtail
{ "docstring": "\n Return a queryset of collections descended from a collection on which this user has\n a GroupCollectionPermission record for this action. Used for actions, like edit and\n delete where the user cannot modify the collection where they are granted permission.\n ", "langu...
117
Python
78
d10f15e55806c6944827d801cd9c2d53f5da4186
collections.py
73,937
18
141
_descendants_with_perm
https://github.com/wagtail/wagtail.git
Reformat with black
391
0
16,183
18
5
20
def queryables(self) -> dict[str, Any]: # mypy doesn't recognize DataFrame._AXIS_NAMES, so we re-write it here axis_names = {0: "index", 1: "columns"} # compute the values_axes queryables d1 = [(a.cname, a) for a in self.index_axes] d2 = [(axis_names[axis], None) for ax...
pandas/io/pytables.py
151
pandas
{ "docstring": "return a dict of the kinds allowable columns for this object", "language": "en", "n_whitespaces": 10, "n_words": 11, "vocab_size": 11 }
63
Python
52
050b3b815604652bc445d2487f6e1fc83eaa8d1f
pytables.py
169,378
9
98
queryables
https://github.com/pandas-dev/pandas.git
TYP: Upgrade mypy to 0.981 (#48871) Co-authored-by: Matthew Roeschke <10647082+mroeschke@users.noreply.github.com>
137
0
40,429
12
1
5
def get_tables(self) -> HandlerResponse: query = return self.native_query(query)
mindsdb/integrations/handlers/postgres_handler/postgres_handler.py
34
mindsdb
{ "docstring": "\n List all tabels in PostgreSQL without the system tables information_schema and pg_catalog\n \n SELECT\n table_schema,\n table_name,\n table_type\n FROM\n information_schema.tables\n WHERE\n ...
8
Python
8
f105dbf028004044995817384413b4cdffd7afe2
postgres_handler.py
115,044
16
18
get_tables
https://github.com/mindsdb/mindsdb.git
handlers
30
0
25,335
7
2
7
def decode_locale_str(x): # type: (bytes) -> str return x.decode(encoding=locale.getlocale()[1] or "utf-8", errors="replace")
scapy/utils.py
51
scapy
{ "docstring": "\n Decode bytes into a string using the system locale.\n Useful on Windows where it can be unusual (e.g. cp1252)\n ", "language": "en", "n_whitespaces": 29, "n_words": 19, "vocab_size": 19 }
12
Python
12
664f5985c24c2eb7645bf76327bd333fab5f92b4
utils.py
209,976
2
28
decode_locale_str
https://github.com/secdev/scapy.git
Automata: improve memory management (#3743) * Automata memory improvements (cleanup..) * Add docstrings
21
0
52,840
12
1
2
def label0(self): return self["label0"]
packages/python/plotly/plotly/graph_objs/_funnelarea.py
22
plotly.py
{ "docstring": "\n Alternate to `labels`. Builds a numeric set of labels. Use with\n `dlabel` where `label0` is the starting label and `dlabel` the\n step.\n\n The 'label0' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n ...
4
Python
4
43e3a4011080911901176aab919c0ecf5046ddd3
_funnelarea.py
226,852
2
11
label0
https://github.com/plotly/plotly.py.git
switch to black .22
18
0
58,525
7
1
6
def spreadsheet(self) -> Spreadsheet: return self.client.open_by_key(self.spreadsheet_id)
airbyte-integrations/connectors/destination-google-sheets/destination_google_sheets/spreadsheet.py
33
airbyte
{ "docstring": "\n Returns pygsheets.Spreadsheet with opened target spreadsheet by key.\n ", "language": "en", "n_whitespaces": 23, "n_words": 8, "vocab_size": 8 }
6
Python
6
feb0d2f37803929a1ad0c723eea430f8cd6c201f
spreadsheet.py
5,081
5
19
spreadsheet
https://github.com/airbytehq/airbyte.git
🎉 New Destination: Implement `Destination Google Sheets` using CDK (#12135)
20
0
720
8
1
5
def require_sentencepiece(test_case): return unittest.skipUnless(is_sentencepiece_available(), "test requires SentencePiece")(test_case)
src/transformers/testing_utils.py
37
transformers
{ "docstring": "\n Decorator marking a test that requires SentencePiece. These tests are skipped when SentencePiece isn't installed.\n ", "language": "en", "n_whitespaces": 22, "n_words": 15, "vocab_size": 15 }
7
Python
7
57e6464ac9a31156f1c93e59107323e6ec01309e
testing_utils.py
37,507
2
20
require_sentencepiece
https://github.com/huggingface/transformers.git
Update all require decorators to use skipUnless when possible (#16999)
13
0
6,812
10
1
46
def test_migrate_plugin(self): project2 = self.create_project( name="hellbar", organization=self.organization, teams=[self.team] ) plugin2 = JiraPlugin() plugin2.set_option("enabled", True, project2) plugin2.set_option("default_project", "BAR", project2) ...
tests/sentry/integrations/jira/test_integration.py
636
sentry
{ "docstring": "Test that 2 projects with the Jira plugin enabled that each have an issue created\n from the plugin are migrated along with the ignored fields\n ", "language": "en", "n_whitespaces": 39, "n_words": 25, "vocab_size": 20 }
98
Python
58
f5e5a3b1ed97383e0699aff9eb0363e9eb5db479
test_integration.py
94,417
41
366
test_migrate_plugin
https://github.com/getsentry/sentry.git
feat(Jira): Plugin issue migration endpoint (#37577) * feat(jira): Plugin issue migration endpoint
433
0
19,084
15
1
14
def inspect(): profile = prefect.context.get_profile_context() name, env = profile.name, profile.env console.out(toml.dumps({name: env}).strip()) @profile_app.command()
src/prefect/cli/profile.py
86
@profile_app.command()
prefect
{ "docstring": "\n View settings in the current profile.\n\n Use `prefect --profile <name> profile inspect` to get settings for another profile.\n ", "language": "en", "n_whitespaces": 28, "n_words": 18, "vocab_size": 16 }
13
Python
12
cb7814344ff2e34bafbd3a0c78e1c1ff41bb74c8
profile.py
53,448
4
44
inspect
https://github.com/PrefectHQ/prefect.git
Add `prefect profile set/unset/inspect/ls`
24
1
10,811
13
6
25
def downgrade(): conn = op.get_bind() if conn.dialect.name == 'sqlite': op.execute('PRAGMA foreign_keys=OFF') with op.batch_alter_table('ab_view_menu', schema=None) as batch_op: batch_op.drop_constraint('ab_view_menu_name_uq', type_='unique') op.execute('PRAGMA foreign_k...
airflow/migrations/versions/0106_909884dea523_update_migration_for_fab_tables_to_add_missing_constraints.py
669
airflow
{ "docstring": "Unapply Update migration for FAB tables to add missing constraints", "language": "en", "n_whitespaces": 9, "n_words": 10, "vocab_size": 10 }
121
Python
55
2f5a567977e1219cab16c2548825a1b9eba07ab3
0106_909884dea523_update_migration_for_fab_tables_to_add_missing_constraints.py
46,542
30
393
downgrade
https://github.com/apache/airflow.git
Use Airflow.Base.metadata in FAB models (#22353) Since FAB models are now in airflow, it makes sense to monitor changes in them. Therefore we use Airflow.models.base.Base.metadata for FAB models
413
0
8,920
16
1
2
def post_display_hook(self) -> None:
src/textual/app.py
16
textual
{ "docstring": "Called immediately after a display is done. Used in tests.", "language": "en", "n_whitespaces": 9, "n_words": 10, "vocab_size": 10 }
4
Python
4
39a764f49fff7ec3363b8ea25fce3fbf1b67ca58
app.py
185,720
2
8
post_display_hook
https://github.com/Textualize/textual.git
call later
11
0
45,129
6
1
8
def approx_standard_normal_cdf(x): return 0.5 * (1.0 + paddle.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * paddle.pow(x, 3))))
modules/image/text_to_image/disco_diffusion_cnclip_vitb16/reverse_diffusion/model/losses.py
69
PaddleHub
{ "docstring": "\n A fast approximation of the cumulative distribution function of the\n standard normal.\n ", "language": "en", "n_whitespaces": 22, "n_words": 12, "vocab_size": 10 }
17
Python
14
f4d6e64cdc132ae868699a0ba442f4ab1d304a14
losses.py
49,801
2
51
approx_standard_normal_cdf
https://github.com/PaddlePaddle/PaddleHub.git
add disco_diffusion_cnclip_vitb16 module
23
0
9,921
16
7
15
def print_help(self): has_portfolio_start = "" if "Delta" in self.greeks["Portfolio"] else "[unvl]" has_portfolio_end = "" if "Delta" in self.greeks["Portfolio"] else "[/unvl]" has_option_start = ( "" if "Delta" in self.greeks["Option A"] or "Delta" in self.greek...
openbb_terminal/stocks/options/hedge/hedge_controller.py
235
OpenBBTerminal
{ "docstring": "Print help\n[param]Ticker: [/param]{self.ticker or None}\n[param]Expiry: [/param]{self.expiration or None}\n[cmds]\n pick pick the underlying asset position\n[/cmds][param]\nUnderlying Asset Position: [/param]{self.underlying_asset_position}\n[cmds]\n list show the available st...
63
Python
28
54a1b6f545a0016c576e9e00eef5c003d229dacf
hedge_controller.py
284,494
28
100
print_help
https://github.com/OpenBB-finance/OpenBBTerminal.git
Feature/hedge (#1768) * [Bug] Incorrect log for reddit keys. #1733 fix * Create new feature-hedge * Significantly improve code of hedge menu * More robust * Robustness * Fix tests * Fix can't multiply sequence by non-int of type 'numpy.float64' error * Temporary fix of singular matrix error. Retur...
192
0
84,760
12
3
7
def with_attribute(*args, **attr_dict): <div> Some text <div type="grid">1 4 0 1 0</div> <div type="graph">1,3 2,3 1,1</div> <div>this has no type</div> </div> if args: attrs = args[:] else: attrs = attr_dict....
pipenv/patched/notpip/_vendor/pyparsing/actions.py
71
pipenv
{ "docstring": "\n Helper to create a validating parse action to be used with start\n tags created with :class:`make_xml_tags` or\n :class:`make_html_tags`. Use ``with_attribute`` to qualify\n a starting tag with a required attribute value, to avoid false\n matches on common tags such as ``<TD>`` or ``...
39
Python
34
f3166e673fe8d40277b804d35d77dcdb760fc3b3
actions.py
20,532
8
47
with_attribute
https://github.com/pypa/pipenv.git
check point progress on only bringing in pip==22.0.4 (#4966) * vendor in pip==22.0.4 * updating vendor packaging version * update pipdeptree to fix pipenv graph with new version of pip. * Vendoring of pip-shims 0.7.0 * Vendoring of requirementslib 1.6.3 * Update pip index safety restrictions patch for p...
139
0
3,414
11
4
9
def architecture_optimizers(self): opts = self.optimizers() if isinstance(opts,list): # pylint: disable=unsubscriptable-object arc_opts = opts[:self.arc_optim_count] if len(arc_opts) == 1: arc_opts = arc_opts[0] return arc_opts ...
nni/retiarii/oneshot/pytorch/base_lightning.py
95
nni
{ "docstring": "\n Get architecture optimizers from all optimizers. Use this to get your architecture optimizers in ``training_step``.\n\n Returns\n ----------\n opts : List[Optimizer], Optimizer, None\n Architecture optimizers defined in ``configure_architecture_optimizers``. T...
43
Python
29
8b2eb425274cdb4537fbce4a315aec12a378d6db
base_lightning.py
111,758
10
57
architecture_optimizers
https://github.com/microsoft/nni.git
Lightning implementation for retiarii oneshot nas (#4479)
155
0
24,481
11
1
15
def test_avatar_allowed_mime_type_global(self): self._setup_local_files( { "good": {"mimetype": "image/png"}, "bad": {"mimetype": "application/octet-stream"}, } ) channel = self.make_request( "PUT", f"/prof...
tests/rest/client/test_profile.py
228
synapse
{ "docstring": "Tests that the MIME type whitelist for avatars is enforced when updating a\n global profile.\n ", "language": "en", "n_whitespaces": 29, "n_words": 15, "vocab_size": 15 }
41
Python
28
bf60da1a60096fac5fb778b732ff2214862ac808
test_profile.py
246,132
24
128
test_avatar_allowed_mime_type_global
https://github.com/matrix-org/synapse.git
Configurable limits on avatars (#11846) Only allow files which file size and content types match configured limits to be set as avatar. Most of the inspiration from the non-test code comes from matrix-org/synapse-dinsic#19
269
0
71,033
12
3
7
def _distribution_strategy_scope(self): if self._distribution_strategy and not tf.distribute.has_strategy(): with self._distribution_strategy.scope(): yield self._distribution_strategy.scope() else: yield
keras/optimizers/optimizer_v2/optimizer_v2.py
74
keras
{ "docstring": "Returns the `tf.distribute.Strategy` this optimizer was created under.", "language": "en", "n_whitespaces": 7, "n_words": 8, "vocab_size": 8 }
13
Python
12
84afc5193d38057e2e2badf9c889ea87d80d8fbf
optimizer_v2.py
275,531
6
40
_distribution_strategy_scope
https://github.com/keras-team/keras.git
Reformatting the codebase with black. PiperOrigin-RevId: 450093126
71
0
81,420
13
31
4
def classify_sysode(eq, funcs=None, **kwargs): r # Sympify equations and convert iterables of equations into # a list of equations
sympy/solvers/ode/ode.py
24
sympy
{ "docstring": "\n Returns a dictionary of parameter names and values that define the system\n of ordinary differential equations in ``eq``.\n The parameters are further used in\n :py:meth:`~sympy.solvers.ode.dsolve` for solving that system.\n\n Some parameter names and values are:\n\n 'is_linear' (...
19
Python
15
65be461082dda54c8748922f9c29a19af1279fe1
ode.py
197,362
154
559
classify_sysode
https://github.com/sympy/sympy.git
Remove abbreviations in documentation
27
0
48,505
6
1
5
def header_store_parse(self, name, value): raise NotImplementedError
python3.10.4/Lib/email/_policybase.py
20
XX-Net
{ "docstring": "Given the header name and the value provided by the application\n program, return the (name, value) that should be stored in the model.\n ", "language": "en", "n_whitespaces": 37, "n_words": 23, "vocab_size": 19 }
6
Python
6
8198943edd73a363c266633e1aa5b2a9e9c9f526
_policybase.py
223,640
2
12
header_store_parse
https://github.com/XX-net/XX-Net.git
add python 3.10.4 for windows
20
0
57,024
6
11
20
async def async_update(self): get_result = await getCmd( *self._request_args, ObjectType(ObjectIdentity(self._baseoid)) ) errindication, errstatus, errindex, restable = await get_result if errindication and not self._accept_errors: _LOGGER.error("SNMP e...
homeassistant/components/snmp/sensor.py
210
core
{ "docstring": "Get the latest data from the remote SNMP capable host.", "language": "en", "n_whitespaces": 9, "n_words": 10, "vocab_size": 9 }
63
Python
46
d9903c4cf985381002de8b923815b05dd24e0651
sensor.py
297,540
18
129
async_update
https://github.com/home-assistant/core.git
Bump `brother` and `pysnmplib` (#84107) * Bump brother version * Bump pysnmplib version * Update sensor platform * Update switch platform * Update tests * Bump brother Co-authored-by: J. Nick Koston <nick@koston.org>
245
0
96,508
16
3
10
def update_df(self, df): if get_current_execution() != "PandasOnRay" or ( not isinstance(df._query_compiler._modin_frame, PandasOnRayDataframe) ): # pragma: no cover ErrorMessage.not_implemented( "Batch Pipeline API is only implemented for `PandasOnRay` ...
modin/experimental/batch/pipeline.py
71
modin
{ "docstring": "\n Update the dataframe to perform this pipeline on.\n\n Parameters\n ----------\n df : modin.pandas.DataFrame\n The new dataframe to perform this pipeline on.\n ", "language": "en", "n_whitespaces": 68, "n_words": 21, "vocab_size": 15 }
31
Python
31
3d4404e9d9a9b2a3327f8aee664a8e71ac1f18b8
pipeline.py
153,774
8
40
update_df
https://github.com/modin-project/modin.git
FEAT-#4412: Add Batch Pipeline API to Modin (#4452) Co-authored-by: Yaroslav Igoshev <Poolliver868@mail.ru> Co-authored-by: Mahesh Vashishtha <mvashishtha@users.noreply.github.com> Signed-off-by: Rehan Durrani <rehan@ponder.io>
108
0
35,598
12
1
16
def forward(self, pred3d, pred2d, inputs): gt_3d_joints = inputs['joints_3d'] gt_2d_joints = inputs['joints_2d'] has_3d_joints = inputs['has_3d_joints'] has_2d_joints = inputs['has_2d_joints'] loss_3d = mpjpe(pred3d, gt_3d_joints, has_3d_joints) loss_2d = keypoi...
ppdet/modeling/losses/pose3d_loss.py
114
PaddleDetection
{ "docstring": "\n mpjpe: mpjpe loss between 3d joints\n keypoint_2d_loss: 2d joints loss compute by criterion_2dpose\n ", "language": "en", "n_whitespaces": 35, "n_words": 13, "vocab_size": 11 }
36
Python
27
d4e34fe165c09db65fd00113708be1b711ac957c
pose3d_loss.py
211,430
9
72
forward
https://github.com/PaddlePaddle/PaddleDetection.git
pose3d metro modeling (#6612) * pose3d metro modeling * delete extra comments
126
0
53,094
9
3
7
def swapaxes(self, axis1, axis2, copy=True): # noqa: PR01, RT01, D200 axis1 = self._get_axis_number(axis1) axis2 = self._get_axis_number(axis2) if axis1 != axis2: return self.transpose() if copy: return self.copy() return self
modin/pandas/base.py
85
modin
{ "docstring": "\n Interchange axes and swap values axes appropriately.\n ", "language": "en", "n_whitespaces": 22, "n_words": 7, "vocab_size": 6 }
28
Python
23
605efa618e7994681f57b11d04d417f353ef8d50
base.py
153,562
8
52
swapaxes
https://github.com/modin-project/modin.git
DOCS-#3099: Fix `BasePandasDataSet` docstrings warnings (#4333) Co-authored-by: Yaroslav Igoshev <Poolliver868@mail.ru> Signed-off-by: Alexander Myskov <alexander.myskov@intel.com>
93
0
35,443
9
1
20
def test_shortlatex(capfd, hello_world_f90, monkeypatch): ipath = Path(hello_world_f90) mname = "blah" monkeypatch.setattr( sys, "argv", f'f2py -m {mname} {ipath} --latex-doc --short-latex'.split(), ) with util.switchdir(ipath.parent): f2pycli() out, _ =...
numpy/f2py/tests/test_f2py2e.py
161
numpy
{ "docstring": "Ensures that truncated documentation is written out\n\n TODO: Test to ensure this has no effect without --latex-doc\n CLI :: --latex-doc --short-latex\n ", "language": "en", "n_whitespaces": 30, "n_words": 21, "vocab_size": 20 }
44
Python
39
729ad4f92420231e2a7009b3223c6c7620b8b808
test_f2py2e.py
160,145
14
83
test_shortlatex
https://github.com/numpy/numpy.git
TST: Initialize f2py2e tests of the F2PY CLI (#20668) Increases F2PY coverage by around 15 percent. For the CLI itself it covers the major features (around 70 percent), with the exception of mostly numpy.distutils stuff. More importantly, sets the groundwork for #20056, in that passing the same testsuite should ind...
122
0
38,517
16
2
17
def inset_axes(self, bounds, *, transform=None, zorder=5, **kwargs): if transform is None: transform = self.transAxes kwargs.setdefault('label', 'inset_axes') # This puts the rectangle into figure-relative coordinates. inset_locator = _TransformedBoundsLocator(bound...
lib/matplotlib/axes/_axes.py
165
matplotlib
{ "docstring": "\n Add a child inset Axes to this existing Axes.\n\n Warnings\n --------\n This method is experimental as of 3.0, and the API may change.\n\n Parameters\n ----------\n bounds : [x0, y0, width, height]\n Lower-left corner of inset Axes, and it...
68
Python
54
80e672e0700fa8a268aed1bdefedbd0e493e91a7
_axes.py
108,111
12
103
inset_axes
https://github.com/matplotlib/matplotlib.git
enable Axes subclass creation by Axes.inset_axes
185
0
23,066
9
1
16
def test_no_access(self): member = self.create_user("hernando@life.com") self.create_member(user=member, organization=self.organization, role="member") self.login_as(member) self.get_error_response(self.organization.slug, status=status.HTTP_403_FORBIDDEN) admin = self.c...
tests/sentry/api/endpoints/test_organization_codeowners_associations.py
167
sentry
{ "docstring": "\n Tests that users without the 'org:integrations' scope (i.e. Members) cannot access this endpoint.\n ", "language": "en", "n_whitespaces": 28, "n_words": 13, "vocab_size": 13 }
20
Python
18
5efa5eeb57ae6ddf740256e08ce3b9ff4ec98eaa
test_organization_codeowners_associations.py
95,411
9
102
test_no_access
https://github.com/getsentry/sentry.git
feat(codeowners): Add endpoint to view code owner associations per organization (#31030) See API-2186 So the earlier version of this PR just had the endpoint return the entire serialized ProjectCodeOwners for an organization. While that works, the intention behind this feature is to read and use the associations, s...
83
0
19,206
9
3
14
def predict_proba(self, X): check_is_fitted(self) y_pred = self.final_estimator_.predict_proba(self.transform(X)) if isinstance(self._label_encoder, list): # Handle the multilabel-indicator cases y_pred = np.array([preds[:, 0] for preds in y_pred]).T ret...
sklearn/ensemble/_stacking.py
97
scikit-learn
{ "docstring": "Predict class probabilities for `X` using the final estimator.\n\n Parameters\n ----------\n X : {array-like, sparse matrix} of shape (n_samples, n_features)\n Training vectors, where `n_samples` is the number of samples and\n `n_features` is the number of fe...
25
Python
22
c18460f78441f11b3e6c15c12238695fcfe3c872
_stacking.py
260,917
6
60
predict_proba
https://github.com/scikit-learn/scikit-learn.git
EHN Add multilabel classification support for `StackingClassifier` (#24146) * Add stacking multilabel functionality * Add underscore to a class attr * Remove model from base estimator in test_stacking * Remove scale in train/test split in test_stacking_classifier_multilabel * Add stack_method as a test parameter, ...
82
0
76,564
14
1
7
def test_worker_stdout(): script =
python/ray/tests/test_output.py
40
script = """@ray.remote
ray
{ "docstring": "\nimport ray\nimport sys\n\nray.init(num_cpus=2)\n\n@ray.remote", "language": "en", "n_whitespaces": 2, "n_words": 6, "vocab_size": 5 }
4
Python
4
2da2ac52ce3103ddb5192e7a161fec312dcdad53
test_output.py
129,473
22
96
test_worker_stdout
https://github.com/ray-project/ray.git
Unskipped test_worker_stdout (#21708)
7
2
28,963
6
11
34
def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=None): import scipy as sp import scipy.sparse # call as sp.sparse if nodelist is None: nodelist = list(G) if edgelist is None: if G.is_multigraph(): edgelist = list(G.edges(keys=True)) ...
networkx/linalg/graphmatrix.py
438
networkx
{ "docstring": "Returns incidence matrix of G.\n\n The incidence matrix assigns each row to a node and each column to an edge.\n For a standard incidence matrix a 1 appears wherever a row's node is\n incident on the column's edge. For an oriented incidence matrix each\n edge is assigned an orientation (a...
138
Python
86
8a325d26aa7fdd3a72580c4720fa97f971bbefcb
graphmatrix.py
177,330
38
275
incidence_matrix
https://github.com/networkx/networkx.git
Use scipy.sparse array datastructure (#6037) * Use scipy.sparse array datastructure * Add reminder to rm wrapper when scipy adds creation fns. * Rm mention of np matrix from code comment. * Update networkx/algorithms/bipartite/matrix.py Co-authored-by: Stefan van der Walt <sjvdwalt@gmail.com> Co-authore...
458
0
42,349
18
1
6
def save_or_write_to_kafka(self): from sentry.region_to_control.producer import produce_audit_log_entry produce_audit_log_entry(self)
src/sentry/models/auditlogentry.py
31
sentry
{ "docstring": "\n Region Silos do not have access to the AuditLogEntry table which is specific to the control silo.\n For those silos, this method publishes the attempted audit log write to a durable kafka queue synchronously\n that will eventually be consumed by the control silo. For the contr...
7
Python
7
941184cd24186324fd9f7f304b7f713041834726
auditlogentry.py
86,876
3
18
save_or_write_to_kafka
https://github.com/getsentry/sentry.git
chore(hybrid-cloud): AuditLogEntry is a control silo model now (#39890) In the control silo, creating an audit log entry writes to the db directly, whilst in region silo mode creating an audit log entry will instead push to a new kafka producer that consumes into the control silo asynchronously.
28
0
18,179
7
7
24
def open_metadata(self, book, custom_columns): if config.config_use_google_drive: if not gdriveutils.is_gdrive_ready(): raise Exception('Google Drive is configured but not ready') web_content_link = gdriveutils.get_metadata_backup_via_gdrive(book.path) if not...
cps/tasks/metadata_backup.py
209
calibre-web
{ "docstring": "namespaces = {'dc': PURL_NAMESPACE, 'opf': OPF_NAMESPACE}\n test = etree.parse(book_metadata_filepath)\n root = test.getroot()\n for i in root.iter():\n self.log.info(i)\n title = root.find(\"dc:metadata\", namespaces)\n pass\n ...
92
Python
68
26be5ee2372b08c2f906661283a12e84d6c181f8
metadata_backup.py
173,483
37
121
open_metadata
https://github.com/janeczku/calibre-web.git
Backup metadata 3rd step
380
0
40,848
15
1
5
def proxyauth(self, user): name = 'PROXYAUTH' return self._simple_command('PROXYAUTH', user)
python3.10.4/Lib/imaplib.py
37
XX-Net
{ "docstring": "Assume authentication as \"user\".\n\n Allows an authorised administrator to proxy into any user's\n mailbox.\n\n (typ, [data]) = <instance>.proxyauth(user)\n ", "language": "en", "n_whitespaces": 46, "n_words": 18, "vocab_size": 18 }
9
Python
9
8198943edd73a363c266633e1aa5b2a9e9c9f526
imaplib.py
217,908
3
20
proxyauth
https://github.com/XX-net/XX-Net.git
add python 3.10.4 for windows
30
0
55,006
8
11
27
def copy_m2m_relationships(obj1, obj2, fields, kwargs=None): for field_name in fields: if hasattr(obj1, field_name): try: field_obj = obj1._meta.get_field(field_name) except FieldDoesNotExist: continue if isinstance(field_obj, ManyToMa...
awx/main/utils/common.py
263
awx
{ "docstring": "\n In-place operation.\n Given two saved objects, copies related objects from obj1\n to obj2 to field of same name, if field occurs in `fields`\n ", "language": "en", "n_whitespaces": 36, "n_words": 23, "vocab_size": 21 }
110
Python
77
33c0fb79d66f56374d7c042ba79887faa85e2885
common.py
81,766
22
164
copy_m2m_relationships
https://github.com/ansible/awx.git
JT param everything (#12646) * Making almost all fields promptable on job templates and config models * Adding EE, IG and label access checks * Changing jobs preferred instance group function to handle the new IG cache field * Adding new ask fields to job template modules * Address unit/functional tests * Adding ...
521
0
17,256
21
1
21
def keypoint_rotate(keypoint, angle, rows, cols, **params): center = (cols - 1) * 0.5, (rows - 1) * 0.5 matrix = cv2.getRotationMatrix2D(center, angle, 1.0) x, y, a, s = keypoint[:4] x, y = cv2.transform(np.array([[[x, y]]]), matrix).squeeze() return x, y, a + math.radians(angle), s @preserve...
albumentations/augmentations/geometric/functional.py
153
@preserve_channel_dim
albumentations
{ "docstring": "Rotate a keypoint by angle.\n\n Args:\n keypoint (tuple): A keypoint `(x, y, angle, scale)`.\n angle (float): Rotation angle.\n rows (int): Image height.\n cols (int): Image width.\n\n Returns:\n tuple: A keypoint `(x, y, angle, scale)`.\n\n ", "language":...
43
Python
32
557b7b44b393d0701413ed8012a920a0691e06cb
functional.py
225,637
6
107
keypoint_rotate
https://github.com/albumentations-team/albumentations.git
Fix Affine wrong rotation angle (#1091) * Fix Affine wrong rotation angle * Link to issue * Fix Perspective rot. angle for keypoints, fix Affine * Change angle sign, do not change it manually after all changes * Tests * Fix tests and image center * Fix shift_rotate tests Co-authored-by: Eugene Khv...
60
1
57,482
15
1
7
def disable_memoization() -> None: ParserElement.reset_cache() ParserElement._left_recursion_enabled = False ParserElement._packratEnabled = False ParserElement._parse = ParserElement._parseNoCache
pipenv/patched/notpip/_vendor/pyparsing/core.py
51
pipenv
{ "docstring": "\n Disables active Packrat or Left Recursion parsing and their memoization\n\n This method also works if neither Packrat nor Left Recursion are enabled.\n This makes it safe to call before activating Packrat nor Left Recursion\n to clear any previous settings.\n ", ...
14
Python
11
f3166e673fe8d40277b804d35d77dcdb760fc3b3
core.py
20,579
12
29
disable_memoization
https://github.com/pypa/pipenv.git
check point progress on only bringing in pip==22.0.4 (#4966) * vendor in pip==22.0.4 * updating vendor packaging version * update pipdeptree to fix pipenv graph with new version of pip. * Vendoring of pip-shims 0.7.0 * Vendoring of requirementslib 1.6.3 * Update pip index safety restrictions patch for p...
49
0
3,434
7
3
14
def scale(image, factor, resample=Image.Resampling.BICUBIC): if factor == 1: return image.copy() elif factor <= 0: raise ValueError("the factor must be greater than 0") else: size = (round(factor * image.width), round(factor * image.height)) return image.resize(size, res...
src/PIL/ImageOps.py
111
Pillow
{ "docstring": "\n Returns a rescaled image by a specific factor given in parameter.\n A factor greater than 1 expands the image, between 0 and 1 contracts the\n image.\n\n :param image: The image to rescale.\n :param factor: The expansion factor, as a float.\n :param resample: Resampling method to ...
34
Python
30
f8e4e9c2dd94c6f4789639dd891b8a6d5fb16e14
ImageOps.py
242,247
8
69
scale
https://github.com/python-pillow/Pillow.git
Added enums
74
0
69,806
14
4
12
def get_all(self, name, failobj=None): values = [] name = name.lower() for k, v in self._headers: if k.lower() == name: values.append(self.policy.header_fetch_parse(k, v)) if not values: return failobj return values
python3.10.4/Lib/email/message.py
103
XX-Net
{ "docstring": "Return a list of all the values for the named field.\n\n These will be sorted in the order they appeared in the original\n message, and may contain duplicates. Any fields deleted and\n re-inserted are always appended to the header list.\n\n If no such fields exist, failobj...
28
Python
24
8198943edd73a363c266633e1aa5b2a9e9c9f526
message.py
223,791
9
64
get_all
https://github.com/XX-net/XX-Net.git
add python 3.10.4 for windows
107
0
57,068
14
6
15
def get_error_message(subscriber, num=1e6, error_type=None, timeout=20): deadline = time.time() + timeout msgs = [] while time.time() < deadline and len(msgs) < num: _, error_data = subscriber.poll(timeout=deadline - time.time()) if not error_data: # Timed out before any dat...
python/ray/_private/test_utils.py
159
ray
{ "docstring": "Gets errors from GCS subscriber.\n\n Returns maximum `num` error strings within `timeout`.\n Only returns errors of `error_type` if specified.\n ", "language": "en", "n_whitespaces": 28, "n_words": 19, "vocab_size": 18 }
52
Python
43
391901f86bc0bec6d3199ac05f316a05bcc4b910
test_utils.py
146,747
12
99
get_error_message
https://github.com/ray-project/ray.git
[Remove Redis Pubsub 2/n] clean up remaining Redis references in gcs_utils.py (#23233) Continue to clean up Redis and other related Redis references, for - gcs_utils.py - log_monitor.py - `publish_error_to_driver()`
139
0
33,771
14
1
3
def socket(self): return self.sock # Utility methods
python3.10.4/Lib/imaplib.py
20
XX-Net
{ "docstring": "Return socket instance used to connect to IMAP4 server.\n\n socket = <instance>.socket()\n ", "language": "en", "n_whitespaces": 26, "n_words": 12, "vocab_size": 10 }
7
Python
7
8198943edd73a363c266633e1aa5b2a9e9c9f526
imaplib.py
217,955
2
10
socket
https://github.com/XX-net/XX-Net.git
add python 3.10.4 for windows
30
0
55,035
6
2
6
def path_to_string(path): if isinstance(path, os.PathLike): return os.fspath(path) return path
keras/utils/io_utils.py
42
keras
{ "docstring": "Convert `PathLike` objects to their string representation.\n\n If given a non-string typed path object, converts it to its string\n representation.\n\n If the object passed to `path` is not among the above, then it is\n returned unchanged. This allows e.g. passthrough of file objects\n ...
9
Python
8
84afc5193d38057e2e2badf9c889ea87d80d8fbf
io_utils.py
276,920
4
25
path_to_string
https://github.com/keras-team/keras.git
Reformatting the codebase with black. PiperOrigin-RevId: 450093126
25
0
81,776
9
3
5
def creatable_subpage_models(cls): return [ page_model for page_model in cls.allowed_subpage_models() if page_model.is_creatable ]
wagtail/core/models/__init__.py
37
wagtail
{ "docstring": "\n Returns the list of page types that may be created under this page type,\n as a list of model classes\n ", "language": "en", "n_whitespaces": 42, "n_words": 20, "vocab_size": 17 }
12
Python
11
d10f15e55806c6944827d801cd9c2d53f5da4186
__init__.py
73,787
6
22
creatable_subpage_models
https://github.com/wagtail/wagtail.git
Reformat with black
66
0
16,110
9
1
9
def set_full_path(self) -> None: self.full_path = Path( self.config["user_data_dir"] / "models" / f"{self.freqai_info['identifier']}" ) self.full_path.mkdir(parents=True, exist_ok=True)
freqtrade/freqai/freqai_interface.py
82
freqtrade
{ "docstring": "\n Creates and sets the full path for the identifier\n ", "language": "en", "n_whitespaces": 24, "n_words": 9, "vocab_size": 8 }
15
Python
14
5ee3b8cbbb89c8a57cb42cc3253001e47720991b
freqai_interface.py
151,540
8
40
set_full_path
https://github.com/freqtrade/freqtrade.git
update config recording to use all configs, fix tests
54
0
35,040
13
2
9
def get_template(self, template_name): template, origin = self.find_template(template_name) if not hasattr(template, "render"): # template needs to be compiled template = Template(template, origin, template_name, engine=self) return template
django/template/engine.py
70
django
{ "docstring": "\n Return a compiled Template object for the given template name,\n handling template inheritance recursively.\n ", "language": "en", "n_whitespaces": 36, "n_words": 14, "vocab_size": 13 }
25
Python
22
9c19aff7c7561e3a82978a272ecdaad40dda5c00
engine.py
206,272
5
43
get_template
https://github.com/django/django.git
Refs #33476 -- Reformatted code with Black.
75
0
51,459
11
2
31
def get_roi_rel_points_test(self, mask_pred, pred_label, cfg): num_points = cfg.subdivision_num_points uncertainty_map = get_uncertainty(mask_pred, pred_label) num_rois, _, mask_height, mask_width = uncertainty_map.shape # During ONNX exporting, the type of each elements of 'sh...
mmdet/models/roi_heads/mask_heads/mask_point_head.py
267
mmdetection
{ "docstring": "Get ``num_points`` most uncertain points during test.\n\n Args:\n mask_pred (Tensor): A tensor of shape (num_rois, num_classes,\n mask_height, mask_width) for class-specific or class-agnostic\n prediction.\n pred_label (list): The predication ...
123
Python
80
7d1ce22e3328ba89c11b6cdaafff6c96d9da3f4f
mask_point_head.py
244,149
18
175
get_roi_rel_points_test
https://github.com/open-mmlab/mmdetection.git
Fix `pointrend` missing `get_uncertainty` function bug (#7550) * [Fix] Adjust the order of get_classes and FileClient. (#7276) * delete -sv (#7277) Co-authored-by: Wenwei Zhang <40779233+ZwwWayne@users.noreply.github.com> * [Docs] Add Chinese version of finetune (#7178) * [Fix] Fix wrong img name in onnx2t...
286
0
70,261
12
1
2
async def logger_test_deployment(orion_client):
tests/test_logging.py
14
prefect
{ "docstring": "\n A deployment with a flow that returns information about the given loggers\n ", "language": "en", "n_whitespaces": 19, "n_words": 12, "vocab_size": 12 }
3
Python
3
b110baccdbfde300f410b069c873e8b2a2c98e00
test_logging.py
53,100
11
53
logger_test_deployment
https://github.com/PrefectHQ/prefect.git
Add test
6
0
10,717
6
1
2
def pointcloud(self): return self["pointcloud"]
packages/python/plotly/plotly/graph_objs/layout/template/_data.py
22
plotly.py
{ "docstring": "\n The 'pointcloud' property is a tuple of instances of\n Pointcloud that may be specified as:\n - A list or tuple of instances of plotly.graph_objs.layout.template.data.Pointcloud\n - A list or tuple of dicts of string/value properties that\n will be passed ...
4
Python
4
43e3a4011080911901176aab919c0ecf5046ddd3
_data.py
232,547
2
11
pointcloud
https://github.com/plotly/plotly.py.git
switch to black .22
18
0
63,991
7
5
25
def varOr(population, toolbox, lambda_, cxpb, mutpb): offspring = [] for _ in range(lambda_): op_choice = np.random.random() if op_choice < cxpb: # Apply crossover ind1, ind2 = pick_two_individuals_eligible_for_crossover(population) if ind1 is not None: ...
tpot/gp_deap.py
228
tpot
{ "docstring": "Part of an evolutionary algorithm applying only the variation part\n (crossover, mutation **or** reproduction). The modified individuals have\n their fitness invalidated. The individuals are cloned so returned\n population is independent of the input population.\n :param population: A list...
95
Python
68
388616b6247ca4ea8de4e2f340d6206aee523541
gp_deap.py
181,912
19
142
varOr
https://github.com/EpistasisLab/tpot.git
Revert "Deployed 7ccda9a with MkDocs version: 1.3.0" This reverts commit bd9629c40e01241766197119b581a99409b07068.
301
0
43,664
15
2
9
def make_valid(self): if geos_version_tuple() < (3, 8): raise GEOSException("GEOSGeometry.make_valid() requires GEOS >= 3.8.0.") return GEOSGeometry(capi.geos_makevalid(self.ptr), srid=self.srid) # #### Unary predicates ####
django/contrib/gis/geos/geometry.py
68
django
{ "docstring": "\n Attempt to create a valid representation of a given invalid geometry\n without losing any of the input vertices.\n ", "language": "en", "n_whitespaces": 40, "n_words": 18, "vocab_size": 16 }
21
Python
20
9c19aff7c7561e3a82978a272ecdaad40dda5c00
geometry.py
204,039
4
40
make_valid
https://github.com/django/django.git
Refs #33476 -- Reformatted code with Black.
56
0
50,621
10
1
10
def test_image_inside_paragraph(self): # In Draftail's data model, images are block-level elements and therefore # split up preceding / following text into their own paragraphs converter = ContentstateConverter(features=["image"]) result = json.loads( converter.from_database_...
wagtail/admin/tests/test_contentstate.py
347
wagtail
{ "docstring": "\n <p>before <embed embedtype=\"image\" alt=\"an image\" id=\"1\" format=\"left\" /> after</p>\n ", "language": "en", "n_whitespaces": 32, "n_words": 9, "vocab_size": 9 }
111
Python
72
d10f15e55806c6944827d801cd9c2d53f5da4186
test_contentstate.py
71,919
52
181
test_image_inside_paragraph
https://github.com/wagtail/wagtail.git
Reformat with black
1,056
0
15,780
16
8
12
def get_losses_for(self, inputs): if inputs is None: # Requesting unconditional losses. return [l for l in self.losses if l._unconditional_loss] # Requesting input-conditional losses. losses = [l for l in self.losses if not l._unconditional_loss] inputs ...
keras/engine/base_layer_v1.py
117
keras
{ "docstring": "Retrieves losses relevant to a specific set of inputs.\n\n Args:\n inputs: Input tensor or list/tuple of input tensors.\n\n Returns:\n List of loss tensors of the layer that depend on `inputs`.\n ", "language": "en", "n_whitespaces": 69, "n_words": 30, "v...
50
Python
27
84afc5193d38057e2e2badf9c889ea87d80d8fbf
base_layer_v1.py
270,970
7
75
get_losses_for
https://github.com/keras-team/keras.git
Reformatting the codebase with black. PiperOrigin-RevId: 450093126
121
0
80,627
10
1
12
def load_config(file_path): _, ext = os.path.splitext(file_path) assert ext in ['.yml', '.yaml'], "only support yaml files for now" config = yaml.load(open(file_path, 'rb'), Loader=yaml.Loader) return config
tools/program.py
84
PaddleOCR
{ "docstring": "\n Load config from yml/yaml file.\n Args:\n file_path (str): Path of the config file to be loaded.\n Returns: global config\n ", "language": "en", "n_whitespaces": 39, "n_words": 19, "vocab_size": 17 }
24
Python
21
a323fce66dd68a881cf599526185b52ab5df356b
program.py
22,954
5
49
load_config
https://github.com/PaddlePaddle/PaddleOCR.git
vqa code integrated into ppocr training system
39
0
4,491
11
1
47
def test_basic(request): processing_factory = MetricsConsumerStrategyFactory( max_msg_batch_size=1, max_msg_batch_time=1, max_parallel_batch_size=1, max_parallel_batch_time=1, max_batch_size=1, max_batch_time=1, processes=1, input_block_size=1024,...
tests/sentry/sentry_metrics/test_parallel_indexer.py
284
sentry
{ "docstring": "\n Integration test to verify that the parallel indexer can spawn subprocesses\n properly. The main purpose is to verify that there are no\n pickling/unpickling errors when passing the strategy into the\n ParallelTransformStep, as that is easy to break.\n ", "language": "en", "n_whi...
73
Python
65
d62c4935f02238a8f3991da5ef280a4bf249d771
test_parallel_indexer.py
85,561
35
184
test_basic
https://github.com/getsentry/sentry.git
fix(metrics): Fix startup crash in parallel indexer [sns-1490] (#38455) Since https://github.com/getsentry/sentry/pull/38225 the parallel indexer fails to serialize the processing function here: https://github.com/getsentry/sentry/blob/9bf499ad95030ed1112f117c5c1be59b2e036509/src/sentry/sentry_metrics/consumers/inde...
316
0
18,005
17
5
17
def _get_coeff_exp(expr, x): from sympy.simplify import powsimp (c, m) = expand_power_base(powsimp(expr)).as_coeff_mul(x) if not m: return c, S.Zero [m] = m if m.is_Pow: if m.base != x: raise _CoeffExpValueError('expr not of form a*x**b') return c, m.exp ...
sympy/integrals/meijerint.py
148
sympy
{ "docstring": "\n When expr is known to be of the form c*x**b, with c and/or b possibly 1,\n return c, b.\n\n Examples\n ========\n\n >>> from sympy.abc import x, a, b\n >>> from sympy.integrals.meijerint import _get_coeff_exp\n >>> _get_coeff_exp(a*x**b, x)\n (a, b)\n >>> _get_coeff_exp(x...
52
Python
37
f757f3daae6e11ea0cfb7dadc133274d8d74315f
meijerint.py
196,786
14
90
_get_coeff_exp
https://github.com/sympy/sympy.git
Reordered imports 2
122
0
48,173
12
1
8
def activate(self) -> str: load_kube_config_from_dict( config_dict=self.config, context=self.context, ) return self.current_context()
src/prefect/blocks/kubernetes.py
48
prefect
{ "docstring": "\n Convenience method for activating the k8s config stored in an instance of this block\n\n Returns current_context for sanity check\n ", "language": "en", "n_whitespaces": 41, "n_words": 19, "vocab_size": 18 }
10
Python
10
8f3ffd09dc47bfd2af6a635cc04c640febffd519
kubernetes.py
56,999
11
29
activate
https://github.com/PrefectHQ/prefect.git
add test coerage for get_api_client and activate
60
0
11,603
9
6
15
def parse_python_requires(value): # type: (t.Any) -> tuple[str, ...] if not isinstance(value, str): raise ValueError('python_requires must must be of type `str` not type `%s`' % type(value)) versions: tuple[str, ...] if value == 'default': versions = SUPPORTED_PYTHON_VERSIONS eli...
test/lib/ansible_test/_internal/content_config.py
136
ansible
{ "docstring": "Parse the given 'python_requires' version specifier and return the matching Python versions.", "language": "en", "n_whitespaces": 11, "n_words": 12, "vocab_size": 11 }
57
Python
41
f2abfc4b3d03a2baa078477d0ad2241263a00668
content_config.py
267,750
12
79
parse_python_requires
https://github.com/ansible/ansible.git
ansible-test - Parse content config only once. (#78418)
114
0
79,034
16
3
12
def set_to_context(self, name): attribute = self.fattributes[name] if isinstance(attribute, NonInheritableFieldAttribute): # setting to sentinel will trigger 'default/default()' on getter setattr(self, name, Sentinel) else: try: setat...
lib/ansible/playbook/base.py
100
ansible
{ "docstring": " set to parent inherited value or Sentinel as appropriate", "language": "en", "n_whitespaces": 9, "n_words": 9, "vocab_size": 9 }
41
Python
35
ff6e4da36addccb06001f7b05b1a9c04ae1d7984
base.py
268,567
9
64
set_to_context
https://github.com/ansible/ansible.git
fixes to FA inheritance (#78990) finalized applies to all field attributes fix getting parent value also remove unused/needed extend/prepend signature moar testing
158
0
79,551
15
3
16
def is_url_equal(url, other_url): # type: (str, str) -> bool if not isinstance(url, str): raise TypeError(f"Expected string for url, received {url!r}") if not isinstance(other_url, str): raise TypeError(f"Expected string for url, received {other_url!r}") parsed_url = urllib3_util.pa...
pipenv/utils/internet.py
164
pipenv
{ "docstring": "\n Compare two urls by scheme, host, and path, ignoring auth\n\n :param str url: The initial URL to compare\n :param str url: Second url to compare to the first\n :return: Whether the URLs are equal without **auth**, **query**, and **fragment**\n :rtype: bool\n\n >>> is_url_equal(\"h...
51
Python
35
3387881a6d4fc2d8bdc0f05c484cb2f7222acfb8
internet.py
19,514
10
98
is_url_equal
https://github.com/pypa/pipenv.git
Code reorg utils into utils module reduces complexity (#4990) * Split apart the massive utils.py into a utils module
92
0
3,008
11
1
3
def iloc(self) -> _iLocIndexer: return _iLocIndexer("iloc", self)
pandas/core/indexing.py
28
pandas
{ "docstring": "\n Purely integer-location based indexing for selection by position.\n\n ``.iloc[]`` is primarily integer position based (from ``0`` to\n ``length-1`` of the axis), but may also be used with a boolean\n array.\n\n Allowed inputs are:\n\n - An integer, e.g. ``5...
7
Python
7
e7afa4b641b146874d17c36caa8a050bfde31283
indexing.py
168,154
137
15
iloc
https://github.com/pandas-dev/pandas.git
DOC: Add tuple description to allowed inputs for iloc #47799 (#47989) DOC: Add tuple description to allowed inputs for iloc
21
0
40,218
8
3
6
def set_task_factory(self, factory): if factory is not None and not callable(factory): raise TypeError('task factory must be a callable or None') self._task_factory = factory
python3.10.4/Lib/asyncio/base_events.py
52
XX-Net
{ "docstring": "Set a task factory that will be used by loop.create_task().\n\n If factory is None the default task factory will be set.\n\n If factory is a callable, it should have a signature matching\n '(loop, coro)', where 'loop' will be a reference to the active\n event loop, 'coro' w...
23
Python
20
8198943edd73a363c266633e1aa5b2a9e9c9f526
base_events.py
220,322
4
30
set_task_factory
https://github.com/XX-net/XX-Net.git
add python 3.10.4 for windows
55
0
55,965
10
1
4
def ports(self): return self.runtime_args.port
jina/serve/gateway.py
22
jina
{ "docstring": "Gets all the list of ports from the runtime_args as a list.\n :return: The lists of ports to be exposed\n ", "language": "en", "n_whitespaces": 34, "n_words": 20, "vocab_size": 17 }
4
Python
4
e4b930e6369f1ec69b07af6190d61aa3cb3d9cec
gateway.py
13,573
2
12
ports
https://github.com/jina-ai/jina.git
refactor: add properties to gateway (#5417)
18
0
2,687
7
3
9
def _set_level(self, value=None): if value is None and hasattr(self, "_level"): del self._level else: self._level = int(value) level = property(_get_level, _set_level, _set_level)
django/contrib/messages/storage/base.py
75
django
{ "docstring": "\n Set a custom minimum recorded level.\n\n If set to ``None``, the default level will be used (see the\n ``_get_level`` method).\n ", "language": "en", "n_whitespaces": 49, "n_words": 20, "vocab_size": 19 }
21
Python
19
9c19aff7c7561e3a82978a272ecdaad40dda5c00
base.py
204,172
5
36
_set_level
https://github.com/django/django.git
Refs #33476 -- Reformatted code with Black.
67
0
50,669
11
3
5
def _as_graph_element(obj):
    """Convert `obj` to a graph element if possible, otherwise return `None`.

    Args:
        obj: Object to convert.

    Returns:
        The result of ``obj._as_graph_element()`` when that attribute exists
        and is callable; otherwise ``None``.
    """
    converter = getattr(obj, "_as_graph_element", None)
    if converter and callable(converter):
        return converter()
    return None
keras/backend.py
52
keras
{ "docstring": "Convert `obj` to a graph element if possible, otherwise return `None`.\n\n Args:\n obj: Object to convert.\n\n Returns:\n The result of `obj._as_graph_element()` if that method is available;\n otherwise `None`.\n ", "language": "en", "n_whitespaces": 56, "n_words": 28...
15
Python
13
84afc5193d38057e2e2badf9c889ea87d80d8fbf
backend.py
269,626
5
30
_as_graph_element
https://github.com/keras-team/keras.git
Reformatting the codebase with black. PiperOrigin-RevId: 450093126
34
0
80,244
9
3
8
def _find_gcs_address_or_die(): gcs_addresses = _find_address_from_flag("--gcs-address") if len(gcs_addresses) > 1: raise ConnectionError( f"Found multiple active Ray instances: {gcs_addresses}. " "Please specify the one to connect to by setting `--address` flag " ...
python/ray/_private/services.py
102
ray
{ "docstring": "Find one GCS address unambiguously, or raise an error.\n\n Callers outside of this module should use get_ray_address_to_use_or_die()\n ", "language": "en", "n_whitespaces": 23, "n_words": 17, "vocab_size": 17 }
66
Python
43
70db5c5592d94b611fee0a334414f1f4f5cc151a
services.py
128,999
14
50
_find_gcs_address_or_die
https://github.com/ray-project/ray.git
[GCS][Bootstrap n/n] Do not start Redis in GCS bootstrapping mode (#21232) After this change in GCS bootstrapping mode, Redis no longer starts and `address` is treated as the GCS address of the Ray cluster. Co-authored-by: Yi Cheng <chengyidna@gmail.com> Co-authored-by: Yi Cheng <74173148+iycheng@users.noreply.git...
168
0
28,869
12
15
39
def get_tax_template(posting_date, args): args = frappe._dict(args) conditions = [] if posting_date: conditions.append( f ) else: conditions.append("(from_date is null) and (to_date is null)") conditions.append( "ifnull(tax_category, '') = {0}".format(frappe.db.escape(cstr(args.get("tax_category")))...
erpnext/accounts/doctype/tax_rule/tax_rule.py
559
erpnext
{ "docstring": "Get matching tax rule(from_date is null or from_date <= '{posting_date}')\n\t\t\tand (to_date is null or to_date >= '{posting_date}')select * from `tabTax Rule`\n\t\twhere {0}", "language": "en", "n_whitespaces": 21, "n_words": 24, "vocab_size": 21 }
159
Python
103
05dd1d6d15c6c8c66165e9f267078c3cf9aec10e
tax_rule.py
68,528
51
312
get_tax_template
https://github.com/frappe/erpnext.git
refactor: tax rule validity query (#30934)
108
0
14,812
18
1
7
def apothem(self):
    """The inradius of the RegularPolygon.

    The apothem/inradius is the radius of the inscribed circle.

    Returns
    =======

    apothem : number or instance of Basic
    """
    # The inscribed circle's radius is the circumradius scaled by
    # cos(pi / n) for an n-sided regular polygon.
    half_central_angle = S.Pi / self._n
    return self.radius * cos(half_central_angle)
sympy/geometry/polygon.py
36
sympy
{ "docstring": "The inradius of the RegularPolygon.\n\n The apothem/inradius is the radius of the inscribed circle.\n\n Returns\n =======\n\n apothem : number or instance of Basic\n\n See Also\n ========\n\n sympy.geometry.line.Segment.length, sympy.geometry.ellipse.Ci...
6
Python
6
498015021131af4dbb07eb110e5badaba8250c7b
polygon.py
196,303
2
21
apothem
https://github.com/sympy/sympy.git
Updated import locations
20
0
47,803
10
1
5
def seek(self, offset, whence=0):
    # type: (int, int) -> int
    """Change stream position and return the new absolute position.

    Delegates directly to the underlying file object; *whence* follows the
    usual 0 (start) / 1 (current) / 2 (end) convention.
    """
    underlying = self._file
    return underlying.seek(offset, whence)
.venv/lib/python3.8/site-packages/pip/_internal/network/lazy_wheel.py
37
transferlearning
{ "docstring": "Change stream position and return the new absolute position.\n\n Seek to offset relative position indicated by whence:\n * 0: Start of stream (the default). pos should be >= 0;\n * 1: Current position - pos may be negative;\n * 2: End of stream - pos usually negative.\n ...
13
Python
13
f638f5d0e6c8ebed0e69a6584bc7f003ec646580
lazy_wheel.py
60,894
2
23
seek
https://github.com/jindongwang/transferlearning.git
upd; format
34
0
12,326
8
1
19
def test_python_render_indent_guides(): syntax = Panel.fit( Syntax( CODE, lexer="python", line_numbers=True, line_range=(2, 10), theme="default", code_width=60, word_wrap=True, indent_guides=True, ), ...
tests/test_syntax.py
294
rich
{ "docstring": "Iterate and generate a tuple with a flag for first \\x1b[0m\\x1b[48;2;248;248;248m \\x1b[0m│\\n│\\x1b[48;2;248;248;248m \\x1b[0m\\x1b[3;38;2;186;33;33;48;2;248;248;248mand last value.", "language": "en", "n_whitespaces": 18, "n_words": 15, "vocab_size": 14 }
89
Python
59
2cf35b18a8716c963c0f9252544a3a8b9881cd6c
test_syntax.py
161,336
18
73
test_python_render_indent_guides
https://github.com/Textualize/rich.git
Try using default theme in test to avoid ubuntu/macos variance
546
0
38,965
12
8
12
async def install(cls): for field in cls.__fields__.values(): if Block.is_block_class(field.type_): await field.type_.install() if get_origin(field.type_) is Union: for type in get_args(field.type_): if Block.is_block_class(typ...
src/prefect/blocks/core.py
116
prefect
{ "docstring": "\n Makes block available for configuration with current Orion server.\n Recursively installs all nested blocks. Installation is idempotent.\n ", "language": "en", "n_whitespaces": 39, "n_words": 17, "vocab_size": 17 }
23
Python
18
6a1cec80715c2b633362403a6be9470fc70c31e8
core.py
56,175
25
160
install
https://github.com/PrefectHQ/prefect.git
Makes block installation recursive
131
0
11,450
16
1
12
def test_json_to_doc_validation_error(doc):
    """Test that Doc.from_json() raises an exception when validating invalid input."""
    payload = doc.to_json()
    # Removing the required "tokens" key makes the payload invalid.
    payload.pop("tokens")
    with pytest.raises(ValueError):
        Doc(doc.vocab).from_json(payload, validate=True)
spacy/tests/doc/test_json_doc_conversion.py
75
spaCy
{ "docstring": "Test that Doc.from_json() raises an exception when validating invalid input.", "language": "en", "n_whitespaces": 9, "n_words": 10, "vocab_size": 10 }
10
Python
10
8387ce4c01db48d92ac5638e18316c0f1fc8861e
test_json_doc_conversion.py
111,418
5
42
test_json_to_doc_validation_error
https://github.com/explosion/spaCy.git
Add Doc.from_json() (#10688) * Implement Doc.from_json: rough draft. * Implement Doc.from_json: first draft with tests. * Implement Doc.from_json: added documentation on website for Doc.to_json(), Doc.from_json(). * Implement Doc.from_json: formatting changes. * Implement Doc.to_json(): reverting unrelated...
29
0
24,401
12
1
15
def is_sequence_right_padded(mask):
    """Check the mask tensor and see if it is right padded.

    Builds the canonical right-padded mask that has the same number of True
    entries per row, then compares it element-wise against the input.
    """
    seq_len = tf.shape(mask)[1]
    true_per_row = tf.reduce_sum(tf.cast(mask, tf.int32), axis=1)
    canonical = tf.sequence_mask(true_per_row, maxlen=seq_len)
    return tf.reduce_all(tf.equal(mask, canonical))
keras/layers/rnn/gru_lstm_utils.py
100
keras
{ "docstring": "Check the mask tensor and see if it right padded.\n\n For cuDNN kernel, it uses the sequence length param to skip the tailing\n timestep. If the data is left padded, or not a strict right padding (has\n masked value in the middle of the sequence), then cuDNN kernel won't be work\n properly in thos...
18
Python
16
01c906c4178db5ae03b7eb2d298a052c952a0667
gru_lstm_utils.py
268,968
6
64
is_sequence_right_padded
https://github.com/keras-team/keras.git
Reorganize RNN layers, cells and wrappers into smaller logically organized files hosted under an `rnn` directory. PiperOrigin-RevId: 428841673
28
0
79,793
11
1
7
def query_cursor_left(self) -> bool:
    """Check if the cursor can move 1 character left in the text."""
    current = self.cursor_index
    # Moving left clamps at 0; motion is possible only if the clamp
    # would actually change the index.
    return current != max(0, current - 1)
src/textual/widgets/text_input.py
45
textual
{ "docstring": "Check if the cursor can move 1 character left in the text", "language": "en", "n_whitespaces": 11, "n_words": 12, "vocab_size": 11 }
17
Python
13
9e25752c859d25c172697236b94997a38c0799bf
text_input.py
183,346
5
27
query_cursor_left
https://github.com/Textualize/textual.git
Scrolling within text input
45
0
44,157
9
15
54
def resample(*arrays, replace=True, n_samples=None, random_state=None, stratify=None): max_n_samples = n_samples random_state = check_random_state(random_state) if len(arrays) == 0: return None first = arrays[0] n_samples = first.shape[0] if hasattr(first, "shape") else len(first) ...
sklearn/utils/__init__.py
579
scikit-learn
{ "docstring": "Resample arrays or sparse matrices in a consistent way.\n\n The default strategy implements one step of the bootstrapping\n procedure.\n\n Parameters\n ----------\n *arrays : sequence of array-like of shape (n_samples,) or \\\n (n_samples, n_outputs)\n Indexable data-s...
235
Python
156
82cd3d74f252e7d4c5e733b530897d499d5d640b
__init__.py
260,482
44
365
resample
https://github.com/scikit-learn/scikit-learn.git
DOC numpydoc validation for `resample` function (#23916)
576
0
76,278
19
6
18
def resolve_model_string(model_string, default_app=None): if isinstance(model_string, str): try: app_label, model_name = model_string.split(".") except ValueError: if default_app is not None: # If we can't split, assume a model in current app ...
wagtail/core/utils.py
183
wagtail
{ "docstring": "\n Resolve an 'app_label.model_name' string into an actual model class.\n If a model class is passed in, just return that.\n\n Raises a LookupError if a model can not be found, or ValueError if passed\n something that is neither a model or a string.\n ", "language": "en", "n_whitesp...
82
Python
59
d10f15e55806c6944827d801cd9c2d53f5da4186
utils.py
74,693
21
101
resolve_model_string
https://github.com/wagtail/wagtail.git
Reformat with black
311
0
16,297
19
3
11
def trigger(self, sender, event, data=None):
    """Call `draw_rubberband` or `remove_rubberband` based on data.

    Does nothing when *sender* cannot acquire the canvas widget lock.
    """
    lock = self.figure.canvas.widgetlock
    if not lock.available(sender):
        return
    if data is None:
        self.remove_rubberband()
    else:
        self.draw_rubberband(*data)
lib/matplotlib/backend_tools.py
82
matplotlib
{ "docstring": "Call `draw_rubberband` or `remove_rubberband` based on data.", "language": "en", "n_whitespaces": 6, "n_words": 7, "vocab_size": 7 }
17
Python
15
d86a5050b57fc2f3f95d23d94f6c64f86dac2cd3
backend_tools.py
109,085
7
50
trigger
https://github.com/matplotlib/matplotlib.git
Fix method subclassing inconsistencies
78
0
23,431
11
4
7
def default_device(dev=None): if ivy.exists(dev): _assert_dev_correct_formatting(dev) return dev global default_device_stack if not default_device_stack: default_device_stack = ['gpu:0'] if ivy.gpu_is_available() else ['cpu'] return default_device_stack[-1] # noinspection ...
ivy/core/device.py
86
ivy
{ "docstring": "\n Return the input dev if provided, otherwise return the global default device.\n ", "language": "en", "n_whitespaces": 19, "n_words": 12, "vocab_size": 11 }
24
Python
20
d743336b1f3654cd0315f380f43eed4116997c1d
device.py
213,600
8
49
default_device
https://github.com/unifyai/ivy.git
renamed dev_str arg to dev for all methods.
59
0
53,669
11
5
22
def save_optimizer_weights_to_hdf5_group(hdf5_group, optimizer): symbolic_weights = getattr(optimizer, "weights") if symbolic_weights: weights_group = hdf5_group.create_group("optimizer_weights") weight_names = [str(w.name).encode("utf8") for w in symbolic_weights] save_attributes_...
keras/saving/hdf5_format.py
185
keras
{ "docstring": "Saves optimizer weights of a optimizer to a HDF5 group.\n\n Args:\n hdf5_group: HDF5 group.\n optimizer: optimizer instance.\n ", "language": "en", "n_whitespaces": 37, "n_words": 17, "vocab_size": 12 }
52
Python
38
84afc5193d38057e2e2badf9c889ea87d80d8fbf
hdf5_format.py
275,861
17
113
save_optimizer_weights_to_hdf5_group
https://github.com/keras-team/keras.git
Reformatting the codebase with black. PiperOrigin-RevId: 450093126
218
0
81,486
15
8
7
def powdenest(eq, force=False, polar=False): r from sympy.simplify.simplify import posify if force:
sympy/simplify/powsimp.py
39
sympy
{ "docstring": "\n Collect exponents on powers as assumptions allow.\n\n Explanation\n ===========\n\n Given ``(bb**be)**e``, this can be simplified as follows:\n * if ``bb`` is positive, or\n * ``e`` is an integer, or\n * ``|be| < 1`` then this simplifies to ``bb**(be*e)``\n\n Giv...
11
Python
11
2a1afca9477eb781f16d5d6b63fa37abed7740a3
powsimp.py
198,305
110
182
powdenest
https://github.com/sympy/sympy.git
Use sympify less
19
0
48,865
6
4
15
def set_anchor(self, anchor):
    """
    Set the anchor location.

    Parameters
    ----------
    anchor : (float, float) or str
        Either an (*x*, *y*) pair of relative coordinates, or one of the
        named anchor codes accepted by `.Bbox.coefs`.

    Raises
    ------
    TypeError
        If *anchor* is neither a string nor a 2-element tuple/list.
    """
    if isinstance(anchor, str):
        # Named anchors must be one of the known Bbox coefficient codes.
        _api.check_in_list(mtransforms.Bbox.coefs, anchor=anchor)
    else:
        is_pair = isinstance(anchor, (tuple, list)) and len(anchor) == 2
        if not is_pair:
            raise TypeError("anchor must be str or 2-tuple")
    self._anchor = anchor
lib/mpl_toolkits/axes_grid1/axes_divider.py
97
matplotlib
{ "docstring": "\n Parameters\n ----------\n anchor : (float, float) or {'C', 'SW', 'S', 'SE', 'E', 'NE', 'N', \\\n'NW', 'W'}\n Either an (*x*, *y*) pair of relative coordinates (0 is left or\n bottom, 1 is right or top), 'C' (center), or a cardinal direction\n ('...
27
Python
25
e94dfed864a8bbeb215bab5705a490325ac07819
axes_divider.py
109,157
6
60
set_anchor
https://github.com/matplotlib/matplotlib.git
Improve argument checking
77
0
23,457
11
3
5
def parsedate_tz(data):
    """Convert a date string to a time tuple.

    Accounts for military timezones.
    """
    parsed = _parsedate_tz(data)
    if not parsed:
        return
    if parsed[9] is None:
        # Default a missing timezone offset to 0 seconds.
        parsed[9] = 0
    return tuple(parsed)
python3.10.4/Lib/email/_parseaddr.py
61
XX-Net
{ "docstring": "Convert a date string to a time tuple.\n\n Accounts for military timezones.\n ", "language": "en", "n_whitespaces": 18, "n_words": 12, "vocab_size": 11 }
18
Python
14
8198943edd73a363c266633e1aa5b2a9e9c9f526
_parseaddr.py
223,619
7
36
parsedate_tz
https://github.com/XX-net/XX-Net.git
add python 3.10.4 for windows
47
0
57,010
9