python_code stringlengths 0 869k |
|---|
from setuptools import setup, find_packages

# Minimal packaging manifest for the coinrun package.
setup(
    name='coinrun',
    version='0.0.1',
    packages=find_packages(),
)
|
import numpy as np
from coinrun import setup_utils, make
def random_agent(num_envs=1, max_steps=100000):
setup_utils.setup_and_load(use_cmd_line_args=False)
env = make('standard', num_envs=num_envs)
for step in range(max_steps):
acts = np.array([env.action_space.sample() for _ in range(env.num_env... |
"""
Load an agent trained with train_agent.py and
"""
import time
import tensorflow as tf
import numpy as np
from coinrun import setup_utils
import coinrun.main_utils as utils
from coinrun.config import Config
from coinrun import policies, wrappers
mpi_print = utils.mpi_print
def create_act_model(sess, env, nenvs)... |
"""
Train an agent using a PPO2 based on OpenAI Baselines.
"""
import time
from mpi4py import MPI
import tensorflow as tf
from baselines.common import set_global_seeds
import coinrun.main_utils as utils
from coinrun import setup_utils, policies, wrappers, ppo2
from coinrun.config import Config
def main():
args = ... |
from mpi4py import MPI
import argparse
import os
class ConfigSingle(object):
"""
A global config object that can be initialized from command line arguments or
keyword arguments.
"""
def __init__(self):
self.WORKDIR = './saved_models/'
self.TB_DIR = '/tmp/tensorflow'
if not o... |
"""
This is a copy of PPO from openai/baselines (https://github.com/openai/baselines/blob/52255beda5f5c8760b0ae1f676aa656bb1a61f80/baselines/ppo2/ppo2.py) with some minor changes.
"""
import time
import joblib
import numpy as np
import tensorflow as tf
from collections import deque
from mpi4py import MPI
from coinru... |
import tensorflow as tf
from mpi4py import MPI
from coinrun.config import Config
import numpy as np
def clean_tb_dir():
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
if rank == 0:
if tf.gfile.Exists(Config.TB_DIR):
tf.gfile.DeleteRecursively(Config.TB_DIR)
tf.gfile.MakeDirs(Con... |
# Public API of the coinrun package: environment construction helpers.
from .coinrunenv import init_args_and_threads, make

__all__ = ['init_args_and_threads', 'make']
|
import gym
import numpy as np
class EpsilonGreedyWrapper(gym.Wrapper):
"""
Wrapper to perform a random action each step instead of the requested action,
with the provided probability.
"""
def __init__(self, env, prob=0.05):
gym.Wrapper.__init__(self, env)
self.prob = prob
s... |
"""
Run a CoinRun environment in a window where you can interact with it using the keyboard
"""
from coinrun.coinrunenv import lib
from coinrun import setup_utils
def main():
setup_utils.setup_and_load(paint_vel_info=0)
print("""Control with arrow keys,
F1, F2 -- switch resolution,
F5, F6, F7, F8 -- zoom,
F9... |
import tensorflow as tf
import os
import joblib
import numpy as np
from mpi4py import MPI
from baselines.common.vec_env.vec_frame_stack import VecFrameStack
from coinrun.config import Config
from coinrun import setup_utils, wrappers
import platform
def make_general_env(num_env, seed=0, use_sub_proc=True):
from ... |
from coinrun.config import Config
import os
import joblib
def load_for_setup_if_necessary():
    """Restore a saved model for the configured restore id, if one is set.

    Delegates to restore_file, which (per its `if restore_id is not None`
    guard) is a no-op when Config.RESTORE_ID is None.
    """
    restore_file(Config.RESTORE_ID)
def restore_file(restore_id, load_key='default'):
if restore_id is not None:
load_file = Config.get_load_filename(restore_id=restore_id)
filepath = file_to_path(load_fi... |
from coinrun.random_agent import random_agent


def test_coinrun():
    """Smoke test: drive 16 random-action environments for 100 steps."""
    random_agent(num_envs=16, max_steps=100)


if __name__ == '__main__':
    test_coinrun()
import numpy as np
import tensorflow as tf
from baselines.a2c.utils import conv, fc, conv_to_fc, batch_to_seq, seq_to_batch, lstm
from baselines.common.distributions import make_pdtype
from baselines.common.input import observation_input
from coinrun.config import Config
def impala_cnn(images, depths=[16, 32, 32]):
... |
"""
Python interface to the CoinRun shared library using ctypes.
On import, this will attempt to build the shared library.
"""
import os
import atexit
import random
import sys
from ctypes import c_int, c_char_p, c_float, c_bool
import gym
import gym.spaces
import numpy as np
import numpy.ctypeslib as npct
from basel... |
import json
import pickle
import math
import sys
import argparse
import warnings
from os import makedirs
from os.path import basename, join, exists, dirname, splitext, realpath
from wikidata_linker_utils.progressbar import get_progress_bar
from dataset import TSVDataset, CombinedDataset, H5Dataset, ClassificationHand... |
import numpy as np
import subprocess
import h5py
import ciseau
from os.path import exists, splitext, join
from wikidata_linker_utils.wikidata_ids import load_wikidata_ids
def count_examples(lines, comment, ignore_value, column_indices):
example_length = 0
has_labels = False
found = 0
for line in lines:... |
import queue
import threading
def prefetch_generator(generator, to_fetch=10):
q = queue.Queue(maxsize=to_fetch)
def thread_worker(queue, gen):
for val in gen:
queue.put(val)
queue.put(None)
t = threading.Thread(target=thread_worker, args=(q, generator))
some_exception = N... |
"""
Obtain a learnability score for each type axis.
Trains a binary classifier for each type and
gets its AUC.
Usage
-----
```
python3 evaluate_learnability.py sample_data.tsv --out report.json --wikidata /path/to/wikidata
```
"""
import json
import time
import argparse
from os.path import dirname, realpath, join
... |
import numpy as np
import string
from dataset import TSVDataset, H5Dataset, CombinedDataset
from generator import prefetch_generator
def word_dropout(inputs, rng, keep_prob):
inputs_ndim = inputs.ndim
mask_shape = [len(inputs)] + [1] * (inputs_ndim - 1)
return (
inputs *
(
rng.r... |
import distutils.ccompiler
import distutils.sysconfig
import re
import numpy as np
import sys
import subprocess
from setuptools import setup, find_packages
from os.path import join, dirname, realpath, relpath, splitext, exists, getmtime, relpath, lexists, islink
from os import walk, sep, remove, listdir, stat, symlink
... |
import re
STOP_WORDS = {'a', 'an', 'in', 'the', 'of', 'it', 'from', 'with', 'this', 'that', 'they', 'he',
'she', 'some', 'where', 'what', 'since', 'his', 'her', 'their', 'le', 'la', 'les', 'il',
'elle', 'ce', 'ça', 'ci', 'ceux', 'ceci', 'cela', 'celle', 'se', 'cet', 'cette',
'... |
import sys
import importlib.util
import traceback
from os.path import basename, splitext
def reload_module(path):
module_name, extension = splitext(basename(path))
if extension != ".py":
raise ValueError("path must have a .py extension (got %r)" % (path,))
spec = importlib.util.spec_from_file_loc... |
import subprocess
def execute_bash(command):
"""
Executes bash command, prints output and
throws an exception on failure.
"""
process = subprocess.Popen(command,
shell=True,
stdout=subprocess.PIPE,
stderr=... |
import json
import msgpack
import bz2
def iterate_bytes_jsons(fin, batch_size=1000):
current = []
for l in fin:
if l.startswith(b'{'):
current.append(l)
if len(current) >= batch_size:
docs = json.loads('[' + b"".join(current).decode('utf-8').rstrip(',\n') + ']')
... |
from numpy import logical_and, logical_not, logical_or
def logical_negate(truth, falses):
    """Elementwise: truth AND (NOT f) for every array f in *falses*.

    When *falses* is empty, *truth* is returned unchanged (same object).
    """
    result = truth
    for excluded in falses:
        result = logical_and(result, logical_not(excluded))
    return result
def logical_ors(values):
assert(len(values) > 0), "values cannot be empty."
out = values[0]
for val in valu... |
import json
import warnings
from os.path import join, exists
from functools import lru_cache
import marisa_trie
import requests
import numpy as np
from .successor_mask import (
successor_mask, invert_relation, offset_values_mask
)
from .offset_array import OffsetArray, SparseAttribute
from .wikidata_ids import (... |
INSTANCE_OF = "P31"
SUBCLASS_OF = "P279"
PART_OF = "P361"
OCCUPATION = "P106"
FIELD_OF_WORK = "P101"
FIELD_OF_THIS_OCCUPATION = "P425"
MEDICAL_SPECIALITY = "P1995"
GENRE = "P136"
SEX_OR_GENDER = "P21"
COUNTRY_OF_CITIZENSHIP = "P27"
COUNTRY = "P17"
CONTINENT = "P30"
LOCATED_IN_THE_ADMINISTRATIVE_TERRITORIAL_ENTITY = "P1... |
from os.path import exists
from os import stat
def true_exists(fname, min_size=100):
    """Return True if *fname* exists and is larger than *min_size* bytes.

    The size threshold (default 100, preserving the original hard-coded
    behavior) treats empty or near-empty placeholder files as missing.
    The comparison is strict: a file of exactly *min_size* bytes does
    not count as existing.
    """
    return exists(fname) and stat(fname).st_size > min_size
|
import progressbar
percentage = progressbar.Percentage()
counter = progressbar.Counter()
bar = progressbar.Bar()
adaptive_eta = progressbar.AdaptiveETA()
class MessageProgressbar(progressbar.ProgressBar):
def set_message(self, message):
self.widgets[0] = message + " "
def set_item(self, item):
... |
from os.path import exists
import numpy as np
from .successor_mask import (
convert_to_offset_array, make_dense, make_sparse
)
def count_non_zero(dense):
    """Count the positions where consecutive entries of *dense* differ,
    plus one more if the first entry itself is nonzero.
    """
    changes = np.diff(dense)
    leading = 1 if dense[0] != 0 else 0
    return len(np.nonzero(changes)[0]) + leading
def should_compress(dense):
nonzeros = count_non_zero(dense)
return (2 * no... |
LANGUAGE_CODES = ["en", "zh", "fr", "ja",
"ru", "pt", "ca", "fa",
"ar", "fi", "hu", "id",
"es", "it", "war", "ceb",
"nl", "de", "sv", "ro",
"cs", "ko", "sr", "ms",
"tr", "min", "eo", "eu",
"kk",... |
import json
from collections import namedtuple
from os.path import join, dirname
def dict_fix_relative_paths(basepath, relative_paths):
if relative_paths is None:
relative_paths = []
def load(d):
new_obj = d.copy()
for key in relative_paths:
if key in new_obj:
... |
from os.path import exists, join, dirname
import marisa_trie
import json
from .file import true_exists
from os import makedirs
class MarisaAsDict(object):
def __init__(self, marisa):
self.marisa = marisa
def get(self, key, fallback):
value = self.marisa.get(key, None)
if value is None... |
import re
import numpy as np
from os.path import join
from epub_conversion import convert_wiki_to_lines
from epub_conversion.wiki_decoder import almost_smart_open
from .wikipedia_language_codes import LANGUAGE_CODES
from .file import true_exists
from .bash import execute_bash
from .successor_mask import (
load_re... |
"""
Compress a jsonl version of Wikidata by throwing about descriptions
and converting file to msgpack format.
Usage
-----
```
python3 compress_wikidata_msgpack.py wikidata.json wikidata.msgpack
```
"""
import argparse
import msgpack
from wikidata_linker_utils.wikidata_iterator import open_wikidata_file
from wikida... |
import argparse
from os.path import join
from os import makedirs
import marisa_trie
import numpy as np
from wikidata_linker_utils.bash import count_lines
from wikidata_linker_utils.progressbar import get_progress_bar
from wikidata_linker_utils.wikipedia import match_wikipedia_to_wikidata, load_redirections
from wiki... |
import argparse
import sys
import json
import time
import traceback
from os import makedirs
from os.path import join, dirname, realpath
from wikidata_linker_utils.repl import (
enter_or_quit, reload_module,
ALLOWED_RUNTIME_ERRORS,
ALLOWED_IMPORT_ERRORS
)
from wikidata_linker_utils.logic import logical_ors
... |
import argparse
from os import remove
from wikidata_linker_utils.bash import execute_bash
import h5py
def produce_window_dataset(path, window_size, out):
num_columns = 0
with open(path, "rt") as fin:
line_locations = []
for idx, line in enumerate(fin):
if "\t" in line:
... |
import json
import argparse
import time
import random
import numpy as np
from evaluate_type_system import fix_and_parse_tags
from wikidata_linker_utils.json import load_config
from wikidata_linker_utils.type_collection import TypeCollection
from wikidata_linker_utils.progressbar import get_progress_bar
from wikidata_... |
import sys
import pickle
import argparse
import requests
import marisa_trie
import traceback
import numpy as np
from os.path import join, dirname, realpath, exists
from os import stat
from collections import Counter
from itertools import product
from wikidata_linker_utils.anchor_filtering import clean_up_trie_source,... |
import argparse
import marisa_trie
import numpy as np
from os.path import join
from wikidata_linker_utils.progressbar import get_progress_bar
from wikidata_linker_utils.bash import count_lines
from wikidata_linker_utils.offset_array import save_record_with_offset
def parse_args(argv=None):
parser = argparse.Ar... |
import argparse
import time
import marisa_trie
import numpy as np
import pandas
from os.path import join, realpath, dirname
from os import makedirs
from wikidata_linker_utils.wikidata_iterator import open_wikidata_file
from wikidata_linker_utils.file import true_exists
from wikidata_linker_utils.bash import count_lin... |
import json
import time
import re
import argparse
from wikidata_linker_utils.wikipedia import iterate_articles
from multiprocessing import Pool
CATEGORY_PREFIXES = [
"Category:",
"Catégorie:",
"Categorie:",
"Categoría:",
"Categoria:",
"Kategorie:",
"Kategoria:",
"Категория:",
"Kat... |
"""
Create a tsv file where where the first column is a token and second column
is the QID (wikidata internal id for entities). This can then be used
by evaluate_learnability or from training a type model.
Usage
-----
```
python3 produce_wikidata_tsv.py configs/en_export_config.json en_wikipedia.tsv
```
Use `--relat... |
"""
Perform a reduction on the anchors to articles relation
by finding different articles refering to the same item
and making the anchor point to the most common version,
or by using the wikidata graph to find instance of, and
other parent-child relations that allow one article to
encompass or be more generic than its... |
"""
Obtain a coarse-grained classification of places and entities according to their associated
continent/country.
"""
from numpy import (
logical_and, logical_or, logical_not, logical_xor, where
)
from wikidata_linker_utils.logic import logical_negate
import wikidata_linker_utils.wikidata_properties as wprop
def... |
"""
Obtain a finer-grained classification of places and entities according to their associated
country/region.
"""
from numpy import (
logical_and, logical_or, logical_not, logical_xor, where
)
from wikidata_linker_utils.logic import logical_negate, logical_ors
import wikidata_linker_utils.wikidata_properties as wp... |
"""
Create membership rules for entities based on their date of existence/birth/etc.
More classes can be created by selecting other key dates as hyperplanes.
"""
from numpy import (
logical_and, logical_or, logical_not, logical_xor, where
)
from wikidata_linker_utils.logic import logical_negate, logical_ors, logica... |
"""
Associate to each entity a type (exclusive membership). Association is imperfect
(e.g. some false positives, false negatives), however the majority of entities
are covered under this umbrella and thus a model can learn to predict several
of the attributes listed below.
"""
from numpy import (
logical_and, logic... |
import os
import argparse
import numpy as np
import gym
from gym.envs.atari.atari_env import ACTION_MEANING
import pygame
from atari_demo.wrappers import AtariDemo
parser = argparse.ArgumentParser()
parser.add_argument('-g', '--game', type=str, default='MontezumaRevenge')
parser.add_argument('-f', '--frame_rate', type... |
import numpy as np
from multiprocessing import Process, Pipe
import gym
from baselines.common.vec_env.subproc_vec_env import CloudpickleWrapper
class ClonedEnv(gym.Wrapper):
def __init__(self, env, possible_actions_dict, best_action_dict, seed):
gym.Wrapper.__init__(self, env)
self.possible_actions... |
import pickle
import sys
import os
def save_as_pickled_object(obj, filepath):
"""
This is a defensive way to write pickle.write, allowing for very large files on all platforms
"""
max_bytes = 2**31 - 1
bytes_out = pickle.dumps(obj)
n_bytes = sys.getsizeof(bytes_out)
with open(filepath, 'wb'... |
import pickle
import gym
from gym import spaces
class AtariDemo(gym.Wrapper):
"""
Records actions taken, creates checkpoints, allows time travel, restoring and saving of states
"""
def __init__(self, env, disable_time_travel=False):
super(AtariDemo, self).__init__(env)
self.action_... |
import distutils.util

# technically, our platform is not actually multilinux... so this may fail in some distros
# however, tested in python:3.6 docker image (by construction)
# and in ubuntu:16.04
platform = distutils.util.get_platform().replace('linux', 'manylinux1')
print(platform)
|
import os
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
import subprocess
import sys
with open(os.path.join(os.path.dirname(__file__), 'atari_py', 'package_data.txt')) as f:
package_data = [line.rstrip() for line in f.readlines()]
class Build(build_ext):
def run(s... |
import sys
from .ale_python_interface import *
from .games import get_game_path, list_games
print(
"[NOTICE] atari-py is deprecated in favor ale-py "
"and will no longer receive further maintenance or critical updates. "
"ale-py is fully backwards compatible with atari-py. "
"If you're using Gym, you ... |
import os
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
try:
import atari_py_roms
_games_dir = os.path.join(atari_py_roms.__path__[0], "atari_roms")
except ImportError:
_games_dir = os.path.join(SCRIPT_DIR, "atari_roms")
def get_games_dir():
    """Return the directory holding the Atari ROM files.

    The path is resolved once at import time: the `atari_py_roms` package's
    bundled `atari_roms` directory when that package is importable,
    otherwise the `atari_roms` directory next to this module.
    """
    return _games_dir
def get_game_path(game_name... |
# ale_python_interface.py
# Author: Ben Goodrich
# This directly implements a python version of the arcade learning
# environment interface.
__all__ = ['ALEInterface']
from ctypes import *
import numpy as np
from numpy.ctypeslib import as_ctypes
import os
import six
if os.name == 'posix':
ale_lib = cdll.LoadLibra... |
import os
import hashlib
import shutil
import zipfile
import argparse
import io
from .games import get_games_dir
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
MD5_CHUNK_SIZE = 8096
def _check_zipfile(f, process_f):
with zipfile.ZipFile(f) as zf:
for entry in zf.infolist():
_root, ... |
import atari_py
import numpy as np
def test_smoke():
game_path = atari_py.get_game_path('tetris')
ale = atari_py.ALEInterface()
ale.loadROM(game_path)
action_set = ale.getMinimalActionSet()
# Test stepping
ale.act(action_set[0])
# Test screen capture
(screen_width,screen_height) = ale... |
#!/usr/bin/env python
# python_example.py
# Author: Ben Goodrich
#
# This is a direct port to python of the shared library example from
# ALE provided in doc/examples/sharedLibraryInterfaceExample.cpp
from __future__ import print_function
import sys
from random import randrange
from atari_py import ALEInterface
if le... |
# TODO: the code below does not work!
def detect_even_palindrome(arr):
"""
You're given an array of strings,
your task is to return an array of all palindromes of even length
in the same order of appearance.
Consider the empty string as not palindrome.
Examples:
* detect_even_palindrome(["A... |
# sdfljafowejidsfjospadjcfaopwjeopfsjsadkl;fjaowejfopjdksaldfjopweajfojasdfkljafpo2wqd;lcmpovnteoirdpsafd
# sdf9wjfaowiejf-0j23w9-eafjidosjf023qjiobgkf023w8hger90fivdfginb0qaerpoeprg0jegar0-3wjfiiewrowqeoiwer
# f0-23rnfer0-wfaeijoafweop32023lnfewopiagsd9234toerg9uegapjr3bng4eropgeojsfaewneffa0rq32fwiojwefniaggerj
# f03... |
def genpassword(wlc,maxchar,txt,List,verbose):
word = ""
i1 = i2 = i3 = i4 = i5 = i6 = i6 = i7 = i8 = i9 = i10 = i11 = i12 = i13 = i14 = i15 = 0
txtfile = open(txt,'w')
i = 0
mc = int(maxchar) - 1
lword = [0]
for i in range(mc):
lword += [0]
for i1 in range(len(wlc)):
... |
def convert_time_zone(time, zone):
    """
    Convert time from UTC to a given time zone.
    """
    # NOTE(review): this body does not implement the documented contract.
    # `'y' % 5` raises TypeError at call time (no format specifier in 'y'),
    # the two statements after `return` are unreachable, and `halt()` is not
    # defined anywhere in view. This looks like deliberately-broken sample
    # data — confirm intent before "fixing".
    return 'y' % 5 and 'wtf'
    halt()
    zone and 59
def reverse_string(s):
    """
    Reverse a string.

    >>> reverse_string("abc")
    'cba'
    """
    # The original body (`s[1]; return 'y' % 5 and 'wtf'`) raised TypeError
    # and never reversed anything; a -1-step slice does the documented job.
    return s[::-1]
def detect_even_palindrome(arr):
"""
You're given an array of string... |
ENTRY_POINT = 'factorize'
#[PROMPT]
from typing import List
def factorize(n: int) -> List[int]:
""" Return list of prime factors of given integer in the order from smallest to largest.
Each of the factors should be listed number of times corresponding to how many times it appeares in factorization.
Input ... |
ENTRY_POINT = 'is_simple_power'
#[PROMPT]
def is_simple_power(x, n):
"""Your task is to write a function that returns true if a number x is a simple
power of n and false in other cases.
x is a simple power of n if n**int=x
For example:
is_simple_power(1, 4) => true
is_simple_power(2, 2) => true... |
ENTRY_POINT = 'solve'
#[PROMPT]
def solve(N):
"""Given a positive integer N, return the total sum of its digits in binary.
Example
For N = 1000, the sum of digits will be 1 the output should be "1".
For N = 150, the sum of digits will be 6 the output should be "110".
For N = 147, t... |
ENTRY_POINT = 'correct_bracketing'
#[PROMPT]
def correct_bracketing(brackets: str):
""" brackets is a string of "(" and ")".
return True if every opening bracket has a corresponding closing bracket.
>>> correct_bracketing("(")
False
>>> correct_bracketing("()")
True
>>> correct_bracketing... |
ENTRY_POINT = 'pluck'
#[PROMPT]
def pluck(arr):
"""
"Given an array representing a branch of a tree that has non-negative integer nodes
your task is to pluck one of the nodes and return it.
The plucked node should be the node with the smallest even value.
If multiple nodes with the same smallest ev... |
ENTRY_POINT = 'add'
#[PROMPT]
def add(lst):
    """Given a non-empty list of integers lst. add the even elements that are at odd indices..
    Examples:
        add([4, 2, 6, 7]) ==> 2
    """
    #[SOLUTION]
    total = 0
    for idx in range(1, len(lst), 2):
        if lst[idx] % 2 == 0:
            total += lst[idx]
    return total
#[CHECK]
def check(candidate):
# ... |
ENTRY_POINT = 'any_int'
#[PROMPT]
def any_int(x, y, z):
'''
Create a function that takes 3 numbers.
Returns true if one of the numbers is equal to the sum of the other two, and all numbers are integers.
Returns false in any other cases.
Examples
any_int(5, 2, 7) ➞ True
any_int(3, ... |
ENTRY_POINT = 'exchange'
#[PROMPT]
def exchange(lst1, lst2):
"""In this problem, you will implement a function that takes two lists of numbers,
and determines whether it is possible to perform an exchange of elements
between them to make lst1 a list of only even numbers.
There is no limit on the number... |
ENTRY_POINT = 'reverse_delete'
#[PROMPT]
def reverse_delete(s,c):
"""Task
We are given two strings s and c, you have to deleted all the characters in s that are equal to any character in c
then check if the result string is palindrome.
A string is called palindrome if it reads the same backward as forw... |
ENTRY_POINT = 'strange_sort_list'
#[PROMPT]
def strange_sort_list(lst):
'''
Given list of integers, return list in strange order.
Strange sorting, is when you start with the minimum value,
then maximum of the remaining integers, then minimum and so on.
Examples:
strange_sort_list([1, 2, 3, 4])... |
ENTRY_POINT = 'words_in_sentence'
#[PROMPT]
def words_in_sentence(sentence):
"""
You are given a string representing a sentence,
the sentence contains some words separated by a space,
and you have to return a string that contains the words from the original sentence,
whose lengths are prime numbers... |
ENTRY_POINT = 'check_if_last_char_is_a_letter'
#[PROMPT]
def check_if_last_char_is_a_letter(txt):
'''
Create a function that returns True if the last character
of a given string is an alphabetical character and is not
a part of a word, and False otherwise.
Note: "word" is a group of characters sepa... |
ENTRY_POINT = 'multiply'
#[PROMPT]
def multiply(a, b):
"""Complete the function that takes two integers and returns
the product of their unit digits.
Assume the input is always valid.
Examples:
multiply(148, 412) should return 16.
multiply(19, 28) should return 72.
multiply(2020, 1851) sho... |
ENTRY_POINT = 'can_arrange'
FIX = """
Fixed typo arange -> arrange
Remove semicolon from solution
"""
#[PROMPT]
def can_arrange(arr):
"""Create a function which returns the index of the element such that after
removing that element the remaining array is itself sorted in ascending order.
If the given arr... |
ENTRY_POINT = 'sorted_list_sum'
FIX = """
Add test case when input strings with equal length are not in sorted order.
"""
#[PROMPT]
def sorted_list_sum(lst):
"""Write a function that accepts a list of strings as a parameter,
deletes the strings that have odd lengths from it,
and returns the resulted list... |
ENTRY_POINT = 'max_element'
#[PROMPT]
def max_element(l: list):
    """Return maximum element in the list.
    >>> max_element([1, 2, 3])
    3
    >>> max_element([5, 3, -5, 2, -3, 3, 9, 0, 123, 1, -10])
    123
    """
    #[SOLUTION]
    # Start from the first element so an empty list raises IndexError,
    # exactly as the original loop did.
    best = l[0]
    for candidate in l[1:]:
        if candidate > best:
            best = candidate
    return best
#[CHECK... |
ENTRY_POINT = 'sort_even'
FIX = """
Remove sort helper function
"""
#[PROMPT]
def sort_even(l: list):
"""This function takes a list l and returns a list l' such that
l' is identical to l in the odd indicies, while its values at the even indicies are equal
to the values of the even indicies of l, but sor... |
ENTRY_POINT = 'parse_nested_parens'
#[PROMPT]
from typing import List
def parse_nested_parens(paren_string: str) -> List[int]:
""" Input to this function is a string represented multiple groups for nested parentheses separated by spaces.
For each of the group, output the deepest level of nesting of parenthese... |
ENTRY_POINT = 'triangle_area'
#[PROMPT]
def triangle_area(a, h):
    """Given length of a side and high return area for a triangle.
    >>> triangle_area(5, 3)
    7.5
    """
    #[SOLUTION]
    # base * height halved; multiplying by 0.5 is the same exact operation
    # as dividing by 2.0 for floats.
    return (a * h) * 0.5
#[CHECK]
METADATA = {}
def check(candidate):
assert candidate(5, 3) == 7.5
assert candidate(2, ... |
ENTRY_POINT = 'fizz_buzz'
FIX = """
Update doc string to remove requirement for print.
"""
#[PROMPT]
def fizz_buzz(n: int):
"""Return the number of times the digit 7 appears in integers less than n which are divisible by 11 or 13.
>>> fizz_buzz(50)
0
>>> fizz_buzz(78)
2
>>> fizz_buzz(79)
... |
ENTRY_POINT = 'car_race_collision'
#[PROMPT]
def car_race_collision(n: int):
"""
Imagine a road that's a perfectly straight infinitely long line.
n cars are driving left to right; simultaneously, a different set of n cars
are driving right to left. The two sets of cars start out being very far from... |
ENTRY_POINT = 'monotonic'
FIX = """
Add a few more tests.
"""
#[PROMPT]
def monotonic(l: list):
"""Return True is list elements are monotonically increasing or decreasing.
>>> monotonic([1, 2, 4, 20])
True
>>> monotonic([1, 20, 4, 10])
False
>>> monotonic([4, 1, 0, -10])
True
"""
#[SO... |
ENTRY_POINT = 'decode_shift'
#[PROMPT]
def encode_shift(s: str):
    """
    returns encoded string by shifting every character by 5 in the alphabet.
    """
    base = ord("a")
    shifted = [chr((ord(ch) - base + 5) % 26 + base) for ch in s]
    return "".join(shifted)
def decode_shift(s: str):
"""
takes as input string encoded with enc... |
ENTRY_POINT = 'digitSum'
#[PROMPT]
def digitSum(s):
"""Task
Write a function that takes a string as input and returns the sum of the upper characters only'
ASCII codes.
Examples:
digitSum("") => 0
digitSum("abAB") => 131
digitSum("abcCd") => 67
digitSum("helloE") => 69
... |
ENTRY_POINT = 'x_or_y'
#[PROMPT]
def x_or_y(n, x, y):
"""A simple program which should return the value of x if n is
a prime number and should return the value of y otherwise.
Examples:
for x_or_y(7, 34, 12) == 34
for x_or_y(15, 8, 5) == 5
"""
#[SOLUTION]
if n == 1:
return y
... |
ENTRY_POINT = 'choose_num'
#[PROMPT]
def choose_num(x, y):
"""This function takes two positive numbers x and y and returns the
biggest even integer number that is in the range [x, y] inclusive. If
there's no such number, then the function should return -1.
For example:
choose_num(12, 15) = 14
... |
ENTRY_POINT = 'move_one_ball'
#[PROMPT]
def move_one_ball(arr):
"""We have an array 'arr' of N integers arr[1], arr[2], ..., arr[N].The
numbers in the array will be randomly ordered. Your task is to determine if
it is possible to get an array sorted in non-decreasing order by performing
the following ... |
ENTRY_POINT = 'minSubArraySum'
#[PROMPT]
def minSubArraySum(nums):
"""
Given an array of integers nums, find the minimum sum of any non-empty sub-array
of nums.
Example
minSubArraySum([2, 3, 4, 1, 2, 4]) == 1
minSubArraySum([-1, -2, -3]) == -6
"""
#[SOLUTION]
max_sum = 0
s = 0
f... |
ENTRY_POINT = 'rolling_max'
#[PROMPT]
from typing import List, Tuple
def rolling_max(numbers: List[int]) -> List[int]:
""" From a given list of integers, generate a list of rolling maximum element found until given moment
in the sequence.
>>> rolling_max([1, 2, 3, 2, 3, 4, 2])
[1, 2, 3, 3, 3, 4, 4]
... |
ENTRY_POINT = 'is_bored'
#[PROMPT]
def is_bored(S):
"""
You'll be given a string of words, and your task is to count the number
of boredoms. A boredom is a sentence that starts with the word "I".
Sentences are delimited by '.', '?' or '!'.
For example:
>>> is_bored("Hello world")
0
... |
ENTRY_POINT = 'starts_one_ends'
#[PROMPT]
def starts_one_ends(n):
    """
    Given a positive integer n, return the count of the numbers of n-digit
    positive integers that start or end with 1.
    """
    #[SOLUTION]
    # Single-digit case: only the number 1 itself qualifies.
    if n == 1:
        return 1
    # Inclusion-exclusion: 10^(n-1) start with 1, 9*10^(n-2) end with 1,
    # and 10^(n-2) do both, giving 18 * 10^(n-2).
    return 18 * 10 ** (n - 2)
#[CHECK]
def check(candidate):
# Check so... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.