text (string, lengths 67–7.88k) |
|---|
<|fim_prefix|>def <|fim_suffix|>(self, session):
pass<|fim_middle|>on_204<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(g):
g.cmd(b's', b'T05thread:01;')<|fim_middle|>gdb_step<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.check('/admin/default/shell')
ws_url = server.base_url.replace('http://', 'ws://') + '/admin/default/webshell-data'
ws = create_connection(ws_url)
# Python expressions are computed
ws.send('1 + 2')
eq_(ws.recv(), '3')
# Session state is maintained... |
<|fim_prefix|>def <|fim_suffix|>(self, x):
self.__buf.write(struct.pack('>L', x))<|fim_middle|>pack_uint<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
action = ChatJoinRequestHandler(self.callback)
for attr in action.__slots__:
assert getattr(action, attr, "err") != "err", f"got extra slot '{attr}'"
assert len(mro_slots(action)) == len(set(mro_slots(action))), "duplicate slot"<|fim_middle|>test_slot_behaviou... |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")<|fim_middle|>id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(path: Optional[Path] = None) -> Path:
if path is None:
path = Path.cwd()
here = path
while here.parent != here:
config = here / ".neuro.toml"
if config.exists():
return here
here = here.parent
raise ConfigError(f"Project root i... |
<|fim_prefix|>def <|fim_suffix|>():
parser = argparse.ArgumentParser(
description=USAGE,
prog="ddtrace-run",
usage="ddtrace-run <your usual python command>",
formatter_class=argparse.RawTextHelpFormatter,
)
parser.add_argument("command", nargs=argparse.REMAINDER, type=str, he... |
<|fim_prefix|>def <|fim_suffix|>(iterable, n):
"""
Split a interable into chunks of length n with the final element
being the remainder len < n if n does not divide evenly
"""
len_iter = len(iterable)
return [iterable[i: min(i + n, len_iter)] for i in range(0, len_iter, n)]<|fim_middle|>take_n<|... |
<|fim_prefix|>def <|fim_suffix|>(self):
log.debug("Loading live event")
res = self.request("GET", self.live_url)
for event in res.get("events", []):
return "event/{sportId}/{propertyId}/{tournamentId}/{id}".format(**event)<|fim_middle|>get_live_id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(n_servers, i=None):
return server_n<|fim_middle|>dist_fcn_1_server<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> 'outputs.PrivateEndpointConnectionPropertiesResponse':
"""
Resource properties.
"""
return pulumi.get(self, "properties")<|fim_middle|>properties<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
cli_params = ['application_name', 'config_file', 'eu-west-1', '--destinationTableAutoCreate', '--connection-pre-test', 'False']
config_reader = GlobalConfigParametersReader()
default_parameters = config_reader.get_config_key_values_updated_with_cli_args(cli_params)
... |
<|fim_prefix|>def <|fim_suffix|>(
staff_api_client, permission_manage_shipping, shipping_method
):
# given
shipping_method.store_value_in_private_metadata({PUBLIC_KEY: PUBLIC_VALUE})
shipping_method.save(update_fields=["metadata"])
shipping_method_id = graphene.Node.to_global_id(
"ShippingMe... |
<|fim_prefix|>def <|fim_suffix|>(self):
if not session.user:
raise Forbidden
# If the user cannot manage the whole event see if anything gives them
# limited management access.
if not self.event.can_manage(session.user):
urls = sorted(values_from_signal(signals.event_management.managemen... |
<|fim_prefix|>def <|fim_suffix|>(cursor) -> List[Tuple[DbTableSchema, str]]:
schemas: Dict = {}
for row in cursor.fetchall():
table_schema_name: str = row[_TABLE_SCHEMA]
table_name: DbTableMeta = DbTableMeta(row[_TABLE_NAME])
table_column: DbColumn = DbColumn(
name=row[_COLUM... |
<|fim_prefix|>def <|fim_suffix|>():
# Try again with a target with a stretched y axis.
A_orig = np.array([[-3, 3], [-2, 3], [-2, 2], [-3, 2]], dtype=float)
B_orig = np.array([[3, 40], [1, 0], [3, -40], [5, 0]], dtype=float)
A, A_mu = _centered(A_orig)
B, B_mu = _centered(B_orig)
R, s = orthogona... |
<|fim_prefix|>def <|fim_suffix|>(
mock_smb_client: SMBClient,
smb_remote_access_client: SMBRemoteAccessClient,
):
tags = EXPLOITER_TAGS.copy()
smb_remote_access_client.login(FULL_CREDENTIALS[0], set())
smb_remote_access_client.execute_agent(DESTINATION_PATH, tags)
assert tags == EXPLOITER_TAGS.u... |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Gets the workflow trigger callback URL relative path.
"""
return pulumi.get(self, "relative_path")<|fim_middle|>relative_path<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
form_data = {
"name": "Assunto 2",
"visible": True,
"init_date": datetime.now() + timedelta(days=2),
"end_date": datetime.now() + timedelta(days=3),
"subscribe_begin": datetime.now(),
"subscribe_end": datetime.now() + timed... |
<|fim_prefix|>def <|fim_suffix|>(self):
self.deployment_type = "AllAtOnce"
self.pre_traffic_hook = "pre_traffic_function_ref"
self.post_traffic_hook = "post_traffic_function_ref"
self.alarms = ["alarm1ref", "alarm2ref"]
self.role = {"Ref": "MyRole"}
self.trigger_configurations = {
"Trigg... |
<|fim_prefix|>def <|fim_suffix|>(x, n):
c = 0.9
mu = (np.arange(1, n+1) - 0.5)/n
return x - 1/(1 - c/(2*n) * (mu[:,None]*x / (mu[:,None] + mu)).sum(axis=1))<|fim_middle|>f_6<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
args = argsparser()
config_parser = ConfigParser(args)
args = config_parser.parser()
random.seed(args.seed)
np.random.seed(args.seed)
paddle.seed(args.seed)
paddle.device.set_device(args.device)
class_name = args.category
assert class_name in mvtec... |
<|fim_prefix|>def <|fim_suffix|>(api_dir, xml_dir):
import subprocess, sys
try:
# We don't generate groups since we create those manually
ret = subprocess.call('breathe-apidoc -m -o %s -p openucx %s -g struct,file' % (api_dir, xml_dir), shell=True)
if ret < 0:
sys.stderr.writ... |
<|fim_prefix|>def <|fim_suffix|>(
tmp_path: Path,
filename: str,
fmt: str | None,
data: str,
expected: Any,
testing_metadata,
):
path = tmp_path / filename
path.write_text(data)
assert (
jinja_context.load_file_data(str(path), fmt, config=testing_metadata.config)
== e... |
<|fim_prefix|>def <|fim_suffix|>(self) -> int:
return hash(self)<|fim_middle|>hash_code<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
"""Open preferences dialog"""
widgets = gamewidget.getWidgets()
preferencesDialog.run(widgets)
notebook = widgets["preferences_notebook"]
self.assertIsNotNone(preferencesDialog.general_tab)
notebook.next_page()
self.assertIsNotNone(preferencesDialog.hi... |
<|fim_prefix|>def <|fim_suffix|>(dataarray) -> None:
data_repr = fh.short_data_repr_html(dataarray)
assert data_repr.startswith("<pre>array")<|fim_middle|>test_short_data_repr_html<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, fileno, new=False):
mask = 0
if self.listeners[self.READ].get(fileno):
mask |= self.READ_MASK | self.EXC_MASK
if self.listeners[self.WRITE].get(fileno):
mask |= self.WRITE_MASK | self.EXC_MASK
try:
if mask:
if new:
... |
<|fim_prefix|>def <|fim_suffix|>(self):
# restart the collectd mapper to use recently set port
c8y_mapper_status = self.startProcess(
command=self.sudo,
arguments=["systemctl", "restart", "tedge-mapper-collectd.service"],
stdouterr="collectd_mapper_restart",
)
# check the status ... |
<|fim_prefix|>def <|fim_suffix|>(self, collection_name, vectors, top_k):
# Search vector in milvus collection
try:
self.set_collection(collection_name)
search_params = {
"metric_type": METRIC_TYPE,
"params": {
"nprobe": 16
}
}
r... |
<|fim_prefix|>def <|fim_suffix|>(q, t, q_len, t_len):
"""Compute the sliding dot products between a query and a time series.
Parameters
----------
q: numpy.array
Query.
t: numpy.array
Time series.
q_len: int
Length of the query.
t_len: int
... |
<|fim_prefix|>def <|fim_suffix|>(file_path, size=None):
"""
Turn given picture into a smaller version.
"""
im = Image.open(file_path)
if size is not None:
(width, height) = size
if height == 0:
size = get_full_size_from_width(im, width)
else:
size = im.size
... |
<|fim_prefix|>def <|fim_suffix|>(self):
pass<|fim_middle|>test_future<|file_separator|> |
<|fim_prefix|> <|fim_suffix|>(self):<|fim_middle|>contents<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
group_delete_mock = MagicMock(return_value=True)
group_info_mock = MagicMock(return_value={"things": "stuff"})
with patch.dict(group.__salt__, {"group.delete": group_delete_mock}), patch.dict(
group.__salt__, {"group.info": group_info_mock}
):
ret = gr... |
<|fim_prefix|>def <|fim_suffix|>(bin):
if type(bin) == type(bytes()):
try:
return bytes.decode(bin, encoding='utf-8', errors='strict')
except:
pass
# we want a hexdump in \xNN notation. bin.hex only takes a single char, so we replace that later.
return "\\x" + bin.hex(':').replace(':', "\\... |
<|fim_prefix|>def <|fim_suffix|>():
# One of these environment variables are guaranteed to exist
# from our official docker images.
# DISPATCH_VERSION is from a tagged release, and DISPATCH_BUILD is from a
# a git based image.
return "DISPATCH_VERSION" in os.environ or "DISPATCH_BUILD" in os.environ... |
<|fim_prefix|>def <|fim_suffix|>(validate_event_schema):
def inner(message, **kwargs):
event = serialize({"logentry": {"message": message}}, **kwargs)
validate_event_schema(event)
return event["logentry"]["message"]
return inner<|fim_middle|>message_normalizer<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
s = vaex.string_column(["aap", None, "noot", "mies"])
o = ["aap", None, "noot", np.nan]
x = np.arange(4, dtype=np.float64)
x[2] = x[3] = np.nan
m = np.ma.array(x, mask=[0, 1, 0, 1])
df = vaex.from_arrays(x=x, m=m, s=s, o=o)
x = df.x.dropmissing().tolist()
... |
<|fim_prefix|>def <|fim_suffix|>(A, node_features, k):
"""
Compute the k-hop adjacency matrix and aggregated features using message passing.
Parameters:
A (numpy array or scipy sparse matrix): The adjacency matrix of the graph.
node_features (numpy array or scipy sparse matrix): The feature matrix o... |
<|fim_prefix|>def <|fim_suffix|>(self):
if self.options.shared:
self.options.rm_safe("fPIC")
self.options["trantor"].shared = True
if not self.options.with_orm:
del self.options.with_postgres
del self.options.with_postgres_batch
del self.options.with_mysql
del sel... |
<|fim_prefix|>async def <|fim_suffix|>(self):
pass<|fim_middle|>async_tear_down<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
recipe: BaseRecipe,
recipe_conf: PerfRecipeConf,
results: List[PerfMeasurementResults],
) -> List[List[PerfMeasurementResults]]:
results_by_host = self._divide_results_by_host(results)
for host_results in results_by_host.values():
yield host_result... |
<|fim_prefix|>def <|fim_suffix|>():
aq17 = ThermoFunDatabase("aq17")
T = 298.15
P = 1.0e5
#-------------------------------------------------------------------
# Testing attributes and thermodynamic properties of H2O@
#-------------------------------------------------------------------
specie... |
<|fim_prefix|>def <|fim_suffix|>(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
... |
<|fim_prefix|>def <|fim_suffix|>(self, record: logging.LogRecord) -> str:
levelname = record.levelname
if self.use_color and levelname in self.COLORS:
levelname_with_color = (
self.COLOR_SEQ % (30 + self.COLORS[levelname])
+ levelname
+ self.RESET_SEQ
)
... |
<|fim_prefix|>def <|fim_suffix|>(self):
section = self.doc_structure.add_new_section('mysection')
section.writeln('section contents')
self.doc_structure.hrefs['foo'] = 'www.foo.com'
section.hrefs['bar'] = 'www.bar.com'
contents = self.doc_structure.flush_structure()
self.assertIn(b'.. _foo: www.... |
<|fim_prefix|>def <|fim_suffix|>():
"""
"vendors" notary into docker by copying all of notary into the docker
vendor directory - also appending several lines into the Dockerfile because
it pulls down notary from github and builds the binaries
"""
docker_notary_relpath = "vendor/src/github.com/th... |
<|fim_prefix|>def <|fim_suffix|>(context, data_dict):
return {'success': False, 'msg': 'Not implemented yet in the auth refactor'}<|fim_middle|>revision_undelete<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
if not isRunningAsRoot():
return False
if not isMMapSupported():
return False
return True<|fim_middle|>is_timing_series_supported<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(filename, line):
"""
Append one line of text to filename.
:param filename: Path to the file.
:type filename: str
:param line: Line to be written.
:type line: str
"""
append_file(filename, line.rstrip("\n") + "\n")<|fim_middle|>append_one_line<|file_separa... |
<|fim_prefix|>def <|fim_suffix|>(self, assembler):
"""
Create a list of functions to be tested and their reference values for the problem
"""
func_list = [
functions.StructuralMass(assembler),
functions.Compliance(assembler),
functions.KSDisplacement(
assembler, ksWei... |
<|fim_prefix|>def <|fim_suffix|>(request, kube_apis):
filtered_ns_1 = create_namespace_with_name_from_yaml(kube_apis.v1, f"filtered-ns-1", f"{TEST_DATA}/common/ns.yaml")
filtered_ns_2 = create_namespace_with_name_from_yaml(kube_apis.v1, f"filtered-ns-2", f"{TEST_DATA}/common/ns.yaml")
filtered_secret_1 = cr... |
<|fim_prefix|>def <|fim_suffix|>(name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
workspace_name: Optional[pulumi.Input[str]] = None,
... |
<|fim_prefix|> <|fim_suffix|>( self ) :<|fim_middle|>test_copy<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(address: str) -> bytes32:
hrpgot, data = bech32_decode(address)
if data is None:
raise ValueError("Invalid Address")
decoded = convertbits(data, 5, 8, False)
decoded_bytes = bytes32(decoded)
return decoded_bytes<|fim_middle|>decode_puzzle_hash<|file_separator... |
<|fim_prefix|>def <|fim_suffix|>(en_vocab):
doc = Doc(en_vocab, words=["hello", "world"])
with make_tempdir() as d:
file_path = d / "doc"
doc.to_disk(file_path)
doc_d = Doc(en_vocab).from_disk(file_path)
assert doc.to_bytes() == doc_d.to_bytes()<|fim_middle|>test_serialize_doc_ro... |
<|fim_prefix|>def <|fim_suffix|>(self):
with self.assertRaises(ValueError):
losses.regularization_penalty("l1_l2", 1e-4, [])<|fim_middle|>test_regulaization_missing_scale_value<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""Parse command line arguments using argparse.
"""
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument(
'-V', '--version',
action='version',
version='{0}: v{1} by {2}'.format('%(prog)s', __version__, __author__)
)... |
<|fim_prefix|>def <|fim_suffix|>(
component: ComponentSpec,
cross_section: CrossSectionSpec = "strip",
port1: str = "o1",
port2: str = "o2",
straight_length: float | None = None,
**kwargs,
) -> ComponentSpec:
"""Returns double straight.
Args:
component: for cutback.
cross... |
<|fim_prefix|>def <|fim_suffix|>(self):
""" BaseDirectory with no existence check accepts any pathlib path.
"""
foo = SimpleBaseDirectory()
foo.path = pathlib.Path("!!!")
self.assertIsInstance(foo.path, str)<|fim_middle|>test_simple_accepts_any_pathlib<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
fmt = """
# comments are allowed
> # big endian (see documentation for struct)
# empty lines are allowed:
ashort: h
along: l
abyte: b # a byte
achar: c
astr: 5s
afloat: f; adouble: d # multiple "statements" are allowed
afixed: 16.16F
abool: ?
apad: x
"""... |
<|fim_prefix|>def <|fim_suffix|>(tmp_path):
outfilename = tmp_path / "vu_tide_hourly_p0.dfs0"
ds = mikeio.read("tests/testdata/vu_tide_hourly.dfs1")
assert ds.n_elements > 1
ds_0 = ds.isel(0, axis="space")
assert ds_0.n_elements == 1
ds_0_0 = ds_0.isel(0)
assert ds_0_0.n_timesteps == 1
d... |
<|fim_prefix|>def <|fim_suffix|>(self):
self.window.show_all()
self.window.present()<|fim_middle|>show<|file_separator|> |
<|fim_prefix|>async def <|fim_suffix|>(
auth: AcaPyAuth = Depends(acapy_auth),<|fim_middle|>create_did<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
session = requests.Session()
make_session_public_only(session, 'demo_domain', src='testing')
return session<|fim_middle|>set_up_session<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
self.assertEqual(build_password("plain"), "plaintext:plain")<|fim_middle|>test_default_plaintext<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
user_id: str
) -> List[learner_group_domain.LearnerGroup]:
"""Returns a list of learner groups of the given facilitator.
Args:
user_id: str. The id of the facilitator.
Returns:
list(LearnerGroup). A list of learner groups of the given facilitator.
""... |
<|fim_prefix|>def <|fim_suffix|>(self, value: Optional[float]) -> None:
"""When not draining we pass thru to the socket,
since when draining we control the timeout.
"""
if value is not None:
self._recv_timeout_sec = value
if self._drain_thread is None:
socket.socket.METHOD_NAME(se... |
<|fim_prefix|>def <|fim_suffix|>(
self,
description: str,
params: Mapping[str, Any],
url: bool | None = False,
provider: ExternalProviders | None = None,
) -> str:
if self.user:
name = self.user.name or self.user.email
else:
name = "Sentry"
issue_name = self.group.qualifi... |
<|fim_prefix|>def <|fim_suffix|>(self, native_face):
self._face = native_face
self._loops = [RhinoBrepLoop(loop) for loop in native_face.Loops]
self._surface = RhinoNurbsSurface.from_rhino(self._face.UnderlyingSurface().ToNurbsSurface())<|fim_middle|>set_face<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, user):
return self.get_for_user(user, teammembership__role=TeamMembership.ROLE.OWNER)<|fim_middle|>get_owner_teams<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
column = BigqueryColumn(
name="date",
field_path="date",
ordinal_position=1,
data_type="TIMESTAMP",
is_partition_column=True,
cluster_column_position=None,
comment=None,
is_nullable=False,
)
partition_info = ... |
<|fim_prefix|>def <|fim_suffix|>(self, inputs, metric, functional_metric, ref_metric, ignore_index):
"""Test functional implementation of metric."""
preds, target = inputs
if ignore_index is not None:
target = inject_ignore_index(target, ignore_index)
self.run_functional_metric_test(
pre... |
<|fim_prefix|>def <|fim_suffix|>(self, positions: TensorType["bs":..., 3]) -> TensorType["bs":..., 1]:
"""Returns only the density. Used primarily with the density grid.
Args:
positions: the origin of the samples/frustums
"""
# Need to figure out a better way to descibe positions with a ray.
... |
<|fim_prefix|> <|fim_suffix|>(self, old_name, new_name, merge=False):<|fim_middle|>after_rename<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
examinee = create_upgrade_pr(
from_ref=cm.ComponentReference(
name='c1',
componentName='c1',
version='1.2.3',
),
to_ref=cm.ComponentReference(
name='c1',
componentName='c1',
version='2... |
<|fim_prefix|>def <|fim_suffix|>(testsystem_names, niterations=5):
"""
Run sampler stack on named test systems.
Parameters
----------
testsystem_names : list of str
Names of test systems to run
niterations : int, optional, default=5
Number of iterations to run
"""
for tes... |
<|fim_prefix|>def <|fim_suffix|>(self) -> Response:
"""
Get a list with all of the tabels in TDEngine
"""
q = 'SHOW TABLES;'
return self.native_query(q)<|fim_middle|>get_tables<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(
self,
configs: List[Config[ModelConfig]],
performances: List[Performance],
) -> None:
super().METHOD_NAME(configs, performances)
# We need to sort by dataset to have the same ordering for each model config
ordering = np.argsort([c.dataset.name() for c in configs... |
<|fim_prefix|>async def <|fim_suffix|>(mock_iam_client):
group = await get_group(EXAMPLE_GROUPNAME, mock_iam_client)
assert group["GroupName"] == EXAMPLE_GROUPNAME<|fim_middle|>test_get_group<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, Paramsmulticast):
# controle parameters multicast
return self.api.SetMulticastMultiSessionParameters(Paramsmulticast)<|fim_middle|>xmlrpc_set_multicast_multi_session_parameters<|file_separator|> |
<|fim_prefix|>f <|fim_suffix|>(self):<|fim_middle|>test_process_file<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(m):
opt = pyo.SolverFactory('gurobi')
res = opt.solve(m)
assert_optimal_termination(res)<|fim_middle|>solve_warehouse_location<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")<|fim_middle|>id<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
return self.event.METHOD_NAME + f"/session/{self.id}"<|fim_middle|>site_link<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(colorer, s, i):
return colorer.match_seq_regexp(s, i, kind="label", regexp="`[A-z0-9]+[^`]+`_{1,2}")<|fim_middle|>rest_rule17<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self):
for pos in self:
seq = pos.l10n_es_simplified_invoice_sequence_id
pos.l10n_es_simplified_invoice_number = (
seq._get_current_sequence().number_next_actual
)
pos.l10n_es_simplified_invoice_prefix = seq._get_prefix_suffix()[0]
... |
<|fim_prefix|>def <|fim_suffix|>(self):
x = tensor.Tensor(np.array([1, 2, 3]))
self.assertEqual(x.rank, 1)<|fim_middle|>test_rank_is_one_for_vector<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
assert not np.isnan(atmosphere.get_relative_airmass(10))<|fim_middle|>test_airmass_scalar<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
"""Return the default filters (all available filters)."""
return dict((name, set(PlayerIter(name))) for name in PlayerIter.filters)<|fim_middle|>get_default_filters<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, token_ids: Sequence[bytes]) -> Sequence[KlerosToken]:
queries = []
for token_id in token_ids:
queries.append(self.kleros_contract.functions.getTokenInfo(token_id))
# name string, ticker string, addr address, symbolMultihash string, status uint8, numberOfRequest... |
<|fim_prefix|>def <|fim_suffix|>(
self, aligned_segment_starting_times: List[List[float]], stub_test: bool = False
):
"""
Align the individual starting time for each video in this interface relative to the common session start time.
Must be in units seconds relative to the common 'session_start_time'.
... |
<|fim_prefix|>def <|fim_suffix|>(self, msg):
pass<|fim_middle|>on_inv<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>(self, output, identifier):
return self._wrapped.METHOD_NAME(output._lines, identifier)<|fim_middle|>get_value_from_output<|file_separator|> |
<|fim_prefix|>def <|fim_suffix|>():
x = np.zeros((5, 5), dtype=int)
array_2d_view_assign(x[::, ::], 9)
array_2d_view_assign(x[:2:2, :2:3], 10)
array_2d_view_assign(x[3::2, 3::3], 11)
array_2d_view_assign(x[1:2, 2:3], 12)
array_1d_view_assign(x[0, :], 1)
array_1d_view_assign(x[1, ::2], 2)
... |
<|fim_prefix|>def <|fim_suffix|>(iterable):
"""Test whether visitors properly set the type constraint of the a For node representing for/else statement
iterating over a heterogeneous list.
"""
assume(type(iterable[0]) != type(iterable[1]))
val_types = [type(val) for val in iterable]
if int in va... |
<|fim_prefix|>def <|fim_suffix|>(plistpath, content):
"""A test utility to create a plist file with known content.
Ensures that the directory for the file exists, and writes an XML plist with
specific content.
:param plistpath: The path for the plist file to create.
:param content: A dictionary of c... |
<|fim_prefix|>def <|fim_suffix|>(instance, check, aggregator):
del instance['custom_queries']
with mock.patch(
'datadog_checks.ibm_was.IbmWasCheck.make_request', return_value=mock_data('perfservlet-multiple-nodes.xml')
):
check = check(instance)
check.check(instance)
node = 'node... |