Dataset columns (name and value statistics from the preview):

- `commit`: string, length 40
- `old_file`: string, length 4 to 118
- `new_file`: string, length 4 to 118
- `old_contents`: string, length 0 to 2.94k
- `new_contents`: string, length 1 to 4.43k
- `subject`: string, length 15 to 444
- `message`: string, length 16 to 3.45k
- `lang`: string, 1 distinct value
- `license`: string, 13 distinct values
- `repos`: string, length 5 to 43.2k
- `prompt`: string, length 17 to 4.58k
- `response`: string, length 1 to 4.43k
- `prompt_tagged`: string, length 58 to 4.62k
- `response_tagged`: string, length 1 to 4.43k
- `text`: string, length 132 to 7.29k
- `text_tagged`: string, length 173 to 7.33k
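The derived columns appear to be simple concatenations of the base columns: in the sample rows below, `prompt` is `old_contents` followed by `subject`, `response` mirrors `new_contents`, the `*_tagged` variants wrap the same pieces in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, and `text`/`text_tagged` join prompt and response. The following is a minimal sketch of that composition, assuming plain string concatenation (the preview strips whitespace, so the exact joins are not recoverable); `build_row` is a hypothetical helper, not part of the dataset's tooling:

```python
# Hypothetical reconstruction of the derived columns, inferred from the sample
# rows below; this is an assumption, not the dataset's actual build script.
# Exact separators between the joined pieces cannot be recovered from the
# whitespace-stripped preview.
def build_row(old_contents: str, new_contents: str, subject: str) -> dict:
    prompt = old_contents + subject
    prompt_tagged = (
        "<commit_before>" + old_contents
        + "<commit_msg>" + subject
        + "<commit_after>"
    )
    return {
        "prompt": prompt,                      # old file contents followed by the subject line
        "response": new_contents,              # mirrors new_contents
        "prompt_tagged": prompt_tagged,
        "response_tagged": new_contents,
        "text": prompt + new_contents,         # prompt and response concatenated
        "text_tagged": prompt_tagged + new_contents,
    }
```

The sample rows below list each column in the order given above.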
---
commit: 5a49e5bea67465528b1e644a98da282c66e9c35f
old_file: tests/fixtures/postgres.py
new_file: tests/fixtures/postgres.py
old_contents:
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
new_contents:
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def test_engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def test_session(test_engine, loop):
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=test_engine)
yield session
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
subject: Rename 'engine' fixture to 'test_engine'
message: Rename 'engine' fixture to 'test_engine'
lang: Python
license: mit
repos: igboyes/virtool,virtool/virtool,virtool/virtool,igboyes/virtool
prompt:
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()Rename 'engine' fixture to 'test_engine'
response:
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def test_engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def test_session(test_engine, loop):
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=test_engine)
yield session
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
prompt_tagged:
<commit_before>import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()<commit_msg>Rename 'engine' fixture to 'test_engine'<commit_after>
response_tagged:
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def test_engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def test_session(test_engine, loop):
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=test_engine)
yield session
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
text:
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()Rename 'engine' fixture to 'test_engine'import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def test_engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def test_session(test_engine, loop):
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=test_engine)
yield session
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
text_tagged:
<commit_before>import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()<commit_msg>Rename 'engine' fixture to 'test_engine'<commit_after>import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def test_engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@postgres/test")
@pytest.fixture(scope="function")
async def test_session(test_engine, loop):
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=test_engine)
yield session
async with test_engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
---
commit: 7f59bf7b24caf0ae92abadae9427d0293f4a39b7
old_file: longshot.py
new_file: longshot.py
old_contents:
#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os
os.execvp(__name__, [])
if __name__ == '__main__':
print __version__
upgrade()
new_contents:
#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os, sys
os.execvp(sys.executable, sys.argv)
if __name__ == '__main__':
print __version__
upgrade()
subject: Call execvp correctly (I hope).
message: Call execvp correctly (I hope).
lang: Python
license: bsd-3-clause
repos: ftobia/longshot
prompt:
#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os
os.execvp(__name__, [])
if __name__ == '__main__':
print __version__
upgrade()
Call execvp correctly (I hope).
response:
#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os, sys
os.execvp(sys.executable, sys.argv)
if __name__ == '__main__':
print __version__
upgrade()
prompt_tagged:
<commit_before>#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os
os.execvp(__name__, [])
if __name__ == '__main__':
print __version__
upgrade()
<commit_msg>Call execvp correctly (I hope).<commit_after>
response_tagged:
#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os, sys
os.execvp(sys.executable, sys.argv)
if __name__ == '__main__':
print __version__
upgrade()
text:
#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os
os.execvp(__name__, [])
if __name__ == '__main__':
print __version__
upgrade()
Call execvp correctly (I hope).#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os, sys
os.execvp(sys.executable, sys.argv)
if __name__ == '__main__':
print __version__
upgrade()
text_tagged:
<commit_before>#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os
os.execvp(__name__, [])
if __name__ == '__main__':
print __version__
upgrade()
<commit_msg>Call execvp correctly (I hope).<commit_after>#!/usr/local/bin/python
__version__ = '0.1'
HOME_URL = 'https://raw.githubusercontent.com/ftobia/longshot/develop/longshot.py'
def upgrade():
backup_self()
download_and_overwrite()
restart()
def backup_self():
import shutil
new_name = __file__ + '.bak'
shutil.copy(__file__, new_name)
def download_and_overwrite():
import urllib2
response = urllib2.urlopen(HOME_URL)
with open(__file__, 'w') as f:
f.write(response.read())
def restart():
import os, sys
os.execvp(sys.executable, sys.argv)
if __name__ == '__main__':
print __version__
upgrade()
---
commit: 3131ea5c8dd41d18192f685e61c1bc8987038193
old_file: vcs_info_panel/tests/test_clients/test_git.py
new_file: vcs_info_panel/tests/test_clients/test_git.py
old_contents:
import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
def _patch_without_repository(self, func):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
def test_is_repository_without_repository(self):
def _func(_check_output):
self.assertEqual(self.client.is_repository(), False)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
self._patch_without_repository(_func)
new_contents:
import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
def without_git_repository(func):
def inner(*args, **kwargs):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
return func(*args, **kwargs)
return inner
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
@without_git_repository
def test_is_repository_without_repository(self):
self.assertEqual(self.client.is_repository(), True)
subject: Use decorator to patch git repository is not exist
message: Use decorator to patch git repository is not exist
lang: Python
license: mit
repos: giginet/django-debug-toolbar-vcs-info,giginet/django-debug-toolbar-vcs-info
prompt:
import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
def _patch_without_repository(self, func):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
def test_is_repository_without_repository(self):
def _func(_check_output):
self.assertEqual(self.client.is_repository(), False)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
self._patch_without_repository(_func)
Use decorator to patch git repository is not exist
response:
import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
def without_git_repository(func):
def inner(*args, **kwargs):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
return func(*args, **kwargs)
return inner
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
@without_git_repository
def test_is_repository_without_repository(self):
self.assertEqual(self.client.is_repository(), True)
prompt_tagged:
<commit_before>import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
def _patch_without_repository(self, func):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
def test_is_repository_without_repository(self):
def _func(_check_output):
self.assertEqual(self.client.is_repository(), False)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
self._patch_without_repository(_func)
<commit_msg>Use decorator to patch git repository is not exist<commit_after>
response_tagged:
import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
def without_git_repository(func):
def inner(*args, **kwargs):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
return func(*args, **kwargs)
return inner
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
@without_git_repository
def test_is_repository_without_repository(self):
self.assertEqual(self.client.is_repository(), True)
text:
import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
def _patch_without_repository(self, func):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
def test_is_repository_without_repository(self):
def _func(_check_output):
self.assertEqual(self.client.is_repository(), False)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
self._patch_without_repository(_func)
Use decorator to patch git repository is not existimport subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
def without_git_repository(func):
def inner(*args, **kwargs):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
return func(*args, **kwargs)
return inner
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
@without_git_repository
def test_is_repository_without_repository(self):
self.assertEqual(self.client.is_repository(), True)
text_tagged:
<commit_before>import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
def _patch_without_repository(self, func):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
def test_is_repository_without_repository(self):
def _func(_check_output):
self.assertEqual(self.client.is_repository(), False)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
self._patch_without_repository(_func)
<commit_msg>Use decorator to patch git repository is not exist<commit_after>import subprocess
from unittest.mock import patch
from django.test import TestCase
from vcs_info_panel.clients.git import GitClient
def without_git_repository(func):
def inner(*args, **kwargs):
with patch('subprocess.check_output') as _check_output:
_check_output.side_effect = subprocess.CalledProcessError(128,
['git', 'rev-parse', '--is-inside-work-tree'],
'fatal: Not a git repository (or any of the parent directories): .git')
return func(*args, **kwargs)
return inner
class GitClientTestCase(TestCase):
def setUp(self):
self.client = GitClient()
def _test_called_check_output(self, commands):
with patch('subprocess.check_output') as _check_output:
_check_output.assert_called_with(commands)
def test_base_command(self):
self.assertEqual(self.client.base_command, 'git')
def test_is_repository_with_repository(self):
with patch('subprocess.check_output') as _check_output:
_check_output.return_value = b'true'
self.assertEqual(self.client.is_repository(), True)
_check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])
@without_git_repository
def test_is_repository_without_repository(self):
self.assertEqual(self.client.is_repository(), True)
---
commit: c87fb60a13c3f81805d4d446902168656c5e9f6b
old_file: irc/util.py
new_file: irc/util.py
old_contents:
# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = (td.microseconds + seconds * 10**6) / 10**6
return result
new_contents:
# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = float((td.microseconds + seconds * 10**6) / 10**6)
return result
subject: Use float so test passes on Python 2.6
message: Use float so test passes on Python 2.6
lang: Python
license: lgpl-2.1
repos: sim0629/irc
prompt:
# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = (td.microseconds + seconds * 10**6) / 10**6
return result
Use float so test passes on Python 2.6
response:
# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = float((td.microseconds + seconds * 10**6) / 10**6)
return result
prompt_tagged:
<commit_before># from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = (td.microseconds + seconds * 10**6) / 10**6
return result
<commit_msg>Use float so test passes on Python 2.6<commit_after>
response_tagged:
# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = float((td.microseconds + seconds * 10**6) / 10**6)
return result
text:
# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = (td.microseconds + seconds * 10**6) / 10**6
return result
Use float so test passes on Python 2.6# from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = float((td.microseconds + seconds * 10**6) / 10**6)
return result
text_tagged:
<commit_before># from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = (td.microseconds + seconds * 10**6) / 10**6
return result
<commit_msg>Use float so test passes on Python 2.6<commit_after># from jaraco.util.itertools
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item.
>>> always_iterable([1,2,3])
[1, 2, 3]
>>> always_iterable('foo')
('foo',)
>>> always_iterable(None)
(None,)
>>> always_iterable(xrange(10))
xrange(10)
"""
if isinstance(item, basestring) or not hasattr(item, '__iter__'):
item = item,
return item
def total_seconds(td):
"""
Python 2.7 adds a total_seconds method to timedelta objects.
See http://docs.python.org/library/datetime.html#datetime.timedelta.total_seconds
>>> import datetime
>>> total_seconds(datetime.timedelta(hours=24))
86400.0
"""
try:
result = td.total_seconds()
except AttributeError:
seconds = td.seconds + td.days * 24 * 3600
result = float((td.microseconds + seconds * 10**6) / 10**6)
return result
---
commit: f4adce54b573b7776cf3f56230821f982c16b49f
old_file: modules/helloworld/helloworld.py
new_file: modules/helloworld/helloworld.py
old_contents:
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
new_contents:
import time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
subject: Add time.sleep(0.05) in test module
message: Add time.sleep(0.05) in test module
lang: Python
license: mit
repos: RickGray/cyberbot
prompt:
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
Add time.sleep(0.05) in test module
response:
import time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
prompt_tagged:
<commit_before>def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
<commit_msg>Add time.sleep(0.05) in test module<commit_after>
response_tagged:
import time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
text:
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
Add time.sleep(0.05) in test moduleimport time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
text_tagged:
<commit_before>def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
or
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
<commit_msg>Add time.sleep(0.05) in test module<commit_after>import time
def run(seed):
""" function to run
Args:
seed: The value of each line striped in seed file
Returns:
String, object, list, directory, etc.
"""
name, age = seed.split(',')
return 'Hello World! {}, {}'.format(seed, int(age))
def callback(result):
""" callback function to call
Args:
result: ProcessTask instance pool_task_with_timeout() method returned
result = {
'seed': 'Jone',
'data': 'Hello World! Jone',
'exception': None
}
result = {
'seed': 'Jone',
'data': None,
'exception': 'ValueError: invalid literal'
}
Returns:
Anything want to return.
"""
seed = result['seed']
data = result['data']
exception = result['exception']
time.sleep(0.05)
print('seed: "{}", data: "{}", exception: "{}"'
.format(seed, data, exception))
---
commit: 41ba2d55ed00269465d49ba22a1cb07eb899273a
old_file: test/test_run.py
new_file: test/test_run.py
old_contents:
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
new_contents:
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_1deg_jra55_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_1deg_core_run(self):
run_exp('1deg_core_nyf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
subject: Include the 1deg core experiment in tests.
message: Include the 1deg core experiment in tests.
lang: Python
license: apache-2.0
repos: CWSL/access-om
prompt:
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
Include the 1deg core experiment in tests.
response:
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_1deg_jra55_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_1deg_core_run(self):
run_exp('1deg_core_nyf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
prompt_tagged:
<commit_before>
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
<commit_msg>Include the 1deg core experiment in tests.<commit_after>
response_tagged:
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_1deg_jra55_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_1deg_core_run(self):
run_exp('1deg_core_nyf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
text:
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
Include the 1deg core experiment in tests.
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_1deg_jra55_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_1deg_core_run(self):
run_exp('1deg_core_nyf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
text_tagged:
<commit_before>
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
<commit_msg>Include the 1deg core experiment in tests.<commit_after>
from exp_test_helper import run_exp
import pytest
class TestRun():
"""
Run and check return code.
"""
@pytest.mark.fast
def test_1deg_jra55_run(self):
run_exp('1deg_jra55_ryf')
@pytest.mark.slow
def test_1deg_core_run(self):
run_exp('1deg_core_nyf')
@pytest.mark.slow
def test_slow_run(self):
run_exp('025deg_jra55_ryf')
---
commit: cc3d89d4357099ba2df1628e9d91e48c743bd471
old_file: api/common/views.py
new_file: api/common/views.py
old_contents:
import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://localhost:3000/finish-steam/{}'.format(token.key))
new_contents:
import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
subject: Fix incorrect social redirect link
message: Fix incorrect social redirect link
lang: Python
license: apache-2.0
repos: prattl/teamfinder,prattl/teamfinder,prattl/teamfinder,prattl/teamfinder
prompt:
import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://localhost:3000/finish-steam/{}'.format(token.key))
Fix incorrect social redirect link
response:
import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
prompt_tagged:
<commit_before>import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://localhost:3000/finish-steam/{}'.format(token.key))
<commit_msg>Fix incorrect social redirect link<commit_after>
response_tagged:
import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
text:
import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://localhost:3000/finish-steam/{}'.format(token.key))
Fix incorrect social redirect linkimport subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
text_tagged:
<commit_before>import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://localhost:3000/finish-steam/{}'.format(token.key))
<commit_msg>Fix incorrect social redirect link<commit_after>import subprocess
from django.conf import settings
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
|
107b97e952d731f8c55c9ca3208ecd2a41512b8d
|
tests/integration/modules/sysmod.py
|
tests/integration/modules/sysmod.py
|
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
bad = set()
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if not isinstance(docs[fun], basestring):
bad.add(fun)
elif not 'Example::' in docs[fun]:
if not 'Examples::' in docs[fun]:
bad.add(fun)
if bad:
import pprint
pprint.pprint(sorted(bad))
self.assertFalse(bool(bad))
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
Add test to verify loader modules
|
Add test to verify loader modules
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
Add test to verify loader modules
|
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
bad = set()
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if not isinstance(docs[fun], basestring):
bad.add(fun)
elif not 'Example::' in docs[fun]:
if not 'Examples::' in docs[fun]:
bad.add(fun)
if bad:
import pprint
pprint.pprint(sorted(bad))
self.assertFalse(bool(bad))
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
<commit_before>import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
<commit_msg>Add test to verify loader modules<commit_after>
|
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
bad = set()
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if not isinstance(docs[fun], basestring):
bad.add(fun)
elif not 'Example::' in docs[fun]:
if not 'Examples::' in docs[fun]:
bad.add(fun)
if bad:
import pprint
pprint.pprint(sorted(bad))
self.assertFalse(bool(bad))
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
Add test to verify loader modules
import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
bad = set()
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if not isinstance(docs[fun], basestring):
bad.add(fun)
elif not 'Example::' in docs[fun]:
if not 'Examples::' in docs[fun]:
bad.add(fun)
if bad:
import pprint
pprint.pprint(sorted(bad))
self.assertFalse(bool(bad))
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
<commit_before>import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
<commit_msg>Add test to verify loader modules<commit_after>import integration
class SysModuleTest(integration.ModuleCase):
'''
Validate the sys module
'''
def test_list_functions(self):
'''
sys.list_functions
'''
funcs = self.run_function('sys.list_functions')
self.assertTrue('hosts.list_hosts' in funcs)
self.assertTrue('pkg.install' in funcs)
def test_list_modules(self):
'''
sys.list_moduels
'''
mods = self.run_function('sys.list_modules')
self.assertTrue('hosts' in mods)
self.assertTrue('pkg' in mods)
def test_valid_docs(self):
'''
Make sure no functions are exposed that don't have valid docstrings
'''
docs = self.run_function('sys.doc')
bad = set()
for fun in docs:
if fun.startswith('runtests_helpers'):
continue
if not isinstance(docs[fun], basestring):
bad.add(fun)
elif not 'Example::' in docs[fun]:
if not 'Examples::' in docs[fun]:
bad.add(fun)
if bad:
import pprint
pprint.pprint(sorted(bad))
self.assertFalse(bool(bad))
if __name__ == '__main__':
from integration import run_tests
run_tests(SysModuleTest)
|
9058d2ddc9a89913710df0efc8d7c88471592795
|
back2back/management/commands/import_entries.py
|
back2back/management/commands/import_entries.py
|
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2])
|
import collections
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
category_group_counts = collections.defaultdict(int)
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
if not row[1].strip():
continue
Entry.objects.create(
category=row[0],
name=row[1],
first_group_number=row[2],
first_group_index=category_group_counts[(row[0], row[2])],
)
category_group_counts[(row[0], row[2])] += 1
|
Save indexes as well when importing entries.
|
Save indexes as well when importing entries.
|
Python
|
bsd-2-clause
|
mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back
|
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2])
Save indexes as well when importing entries.
|
import collections
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
category_group_counts = collections.defaultdict(int)
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
if not row[1].strip():
continue
Entry.objects.create(
category=row[0],
name=row[1],
first_group_number=row[2],
first_group_index=category_group_counts[(row[0], row[2])],
)
category_group_counts[(row[0], row[2])] += 1
|
<commit_before>import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2])
<commit_msg>Save indexes as well when importing entries.<commit_after>
|
import collections
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
category_group_counts = collections.defaultdict(int)
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
if not row[1].strip():
continue
Entry.objects.create(
category=row[0],
name=row[1],
first_group_number=row[2],
first_group_index=category_group_counts[(row[0], row[2])],
)
category_group_counts[(row[0], row[2])] += 1
|
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2])
Save indexes as well when importing entries.
import collections
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
category_group_counts = collections.defaultdict(int)
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
if not row[1].strip():
continue
Entry.objects.create(
category=row[0],
name=row[1],
first_group_number=row[2],
first_group_index=category_group_counts[(row[0], row[2])],
)
category_group_counts[(row[0], row[2])] += 1
|
<commit_before>import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
Entry.objects.create(category=row[0], name=row[1], first_group_number=row[2])
<commit_msg>Save indexes as well when importing entries.<commit_after>import collections
import csv
from optparse import make_option
from django.core.management import BaseCommand
from back2back.models import Entry
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-i', '--input',
action='store',
dest='input_file',
default=None,
),
make_option(
'--reset',
action='store_true',
dest='reset',
default=False,
),
)
def handle(self, *args, **options):
if options['reset']:
Entry.objects.all().delete()
input_file = options['input_file']
category_group_counts = collections.defaultdict(int)
with open(input_file) as f:
reader = csv.reader(f)
for row in reader:
if not row[1].strip():
continue
Entry.objects.create(
category=row[0],
name=row[1],
first_group_number=row[2],
first_group_index=category_group_counts[(row[0], row[2])],
)
category_group_counts[(row[0], row[2])] += 1
|
2eb8570d52c15b1061f74fe23c1f361ae8ab6d7c
|
CI/syntaxCheck.py
|
CI/syntaxCheck.py
|
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
Fix the location path of OpenIPSL
|
Fix the location path of OpenIPSL
|
Python
|
bsd-3-clause
|
SmarTS-Lab/OpenIPSL,SmarTS-Lab/OpenIPSL,tinrabuzin/OpenIPSL,OpenIPSL/OpenIPSL
|
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
Fix the location path of OpenIPSL
|
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
<commit_before>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
<commit_msg>Fix the location path of OpenIPSL<commit_after>
|
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
Fix the location path of OpenIPSL
import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
<commit_before>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
<commit_msg>Fix the location path of OpenIPSL<commit_after>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
47a9271a00fae3f55c79323c93feb4dc2e1fd515
|
portal/tests/models/test_profile.py
|
portal/tests/models/test_profile.py
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
def test_profile_exception(self):
self.assertRaises(Profile.DoesNotExist, Profile.objects.get, bio="Bogus")
def test_profile_empty(self):
profiles = Profile.objects.filter(bio__exact="Bogus")
self.assertEquals(len(profiles), 0)
|
Add more profile model tests
|
Add more profile model tests
|
Python
|
mit
|
huangsam/chowist,huangsam/chowist,huangsam/chowist
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
Add more profile model tests
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
def test_profile_exception(self):
self.assertRaises(Profile.DoesNotExist, Profile.objects.get, bio="Bogus")
def test_profile_empty(self):
profiles = Profile.objects.filter(bio__exact="Bogus")
self.assertEquals(len(profiles), 0)
|
<commit_before>from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
<commit_msg>Add more profile model tests<commit_after>
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
def test_profile_exception(self):
self.assertRaises(Profile.DoesNotExist, Profile.objects.get, bio="Bogus")
def test_profile_empty(self):
profiles = Profile.objects.filter(bio__exact="Bogus")
self.assertEquals(len(profiles), 0)
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
Add more profile model tests
from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
def test_profile_exception(self):
self.assertRaises(Profile.DoesNotExist, Profile.objects.get, bio="Bogus")
def test_profile_empty(self):
profiles = Profile.objects.filter(bio__exact="Bogus")
self.assertEquals(len(profiles), 0)
|
<commit_before>from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
<commit_msg>Add more profile model tests<commit_after>from django.contrib.auth import get_user_model
from django.test import TestCase
from portal.models import Profile
class TestProfile(TestCase):
"""Profile test suite"""
users = ["john", "jane"]
UserModel = get_user_model()
def setUp(self):
for user in self.users:
self.UserModel.objects.create_user(user, f"{user}@localhost", user)
def test_profile_all(self):
profiles = Profile.objects.all()
self.assertEquals(len(profiles), len(self.users))
def test_profile_get(self):
user = self.UserModel.objects.get(username="john")
profile = Profile.objects.get(user=user)
self.assertIsNotNone(profile)
def test_profile_exception(self):
self.assertRaises(Profile.DoesNotExist, Profile.objects.get, bio="Bogus")
def test_profile_empty(self):
profiles = Profile.objects.filter(bio__exact="Bogus")
self.assertEquals(len(profiles), 0)
|
f1e946f5dde4648428c91bcff59728b615df021b
|
packages/Python/lldbsuite/test/lang/swift/foundation_value_types/data/TestSwiftFoundationTypeData.py
|
packages/Python/lldbsuite/test/lang/swift/foundation_value_types/data/TestSwiftFoundationTypeData.py
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin])
|
Revert "x-fail this test - it was broken by changes to Data"
|
Revert "x-fail this test - it was broken by changes to Data"
This reverts commit 4f1ce1ee7ca2d897602113ac82b55f8422a849c1.
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
Revert "x-fail this test - it was broken by changes to Data"
This reverts commit 4f1ce1ee7ca2d897602113ac82b55f8422a849c1.
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin])
|
<commit_before># TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
<commit_msg>Revert "x-fail this test - it was broken by changes to Data"
This reverts commit 4f1ce1ee7ca2d897602113ac82b55f8422a849c1.<commit_after>
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin])
|
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
Revert "x-fail this test - it was broken by changes to Data"
This reverts commit 4f1ce1ee7ca2d897602113ac82b55f8422a849c1.
# TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin])
|
<commit_before># TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin,
decorators.expectedFailureAll(bugnumber="https://bugs.swift.org/browse/SR-3320")])
<commit_msg>Revert "x-fail this test - it was broken by changes to Data"
This reverts commit 4f1ce1ee7ca2d897602113ac82b55f8422a849c1.<commit_after># TestSwiftFoundationValueTypes.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
import lldbsuite.test.decorators as decorators
lldbinline.MakeInlineTest(
__file__, globals(), decorators=[
decorators.skipUnlessDarwin])
|
76d1d1ba04e9d91559ca017c72c7291752fcc330
|
PVGeo/__tester__.py
|
PVGeo/__tester__.py
|
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
module_strings = [mod[1:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
|
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
print(test_file_strings)
idx = 0
if test_file_strings[0][0] == '/':
idx = 1
module_strings = [mod[idx:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
|
Fix python 2 testing issue
|
Fix python 2 testing issue
|
Python
|
bsd-3-clause
|
banesullivan/ParaViewGeophysics,banesullivan/ParaViewGeophysics,banesullivan/ParaViewGeophysics
|
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
module_strings = [mod[1:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
Fix python 2 testing issue
|
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
print(test_file_strings)
idx = 0
if test_file_strings[0][0] == '/':
idx = 1
module_strings = [mod[idx:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
|
<commit_before>__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
module_strings = [mod[1:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
<commit_msg>Fix python 2 testing issue<commit_after>
|
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
print(test_file_strings)
idx = 0
if test_file_strings[0][0] == '/':
idx = 1
module_strings = [mod[idx:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
|
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
module_strings = [mod[1:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
Fix python 2 testing issue
__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
print(test_file_strings)
idx = 0
if test_file_strings[0][0] == '/':
idx = 1
module_strings = [mod[idx:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
|
<commit_before>__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
module_strings = [mod[1:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
<commit_msg>Fix python 2 testing issue<commit_after>__all__ = [
'test',
]
import unittest
import fnmatch
import os
try:
from colour_runner.runner import ColourTextTestRunner as TextTestRunner
except ImportError:
from unittest import TextTestRunner
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment.
@example:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename))
# Remove extensions and change to module import syle
test_file_strings = [s.replace(os.path.dirname(os.path.dirname(__file__)), '') for s in test_file_strings]
print(test_file_strings)
idx = 0
if test_file_strings[0][0] == '/':
idx = 1
module_strings = [mod[idx:len(mod)-3].replace('/', '.') for mod in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(mod) for mod
in module_strings]
testSuite = unittest.TestSuite(suites)
run = TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
|
f31424d48c4201e672bd47da4bd8fe205661dc4f
|
logsna/__init__.py
|
logsna/__init__.py
|
###############################################################################
#
# Copyright (c) 2012 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <[email protected]>'
import time
import logging
_DEFAULT_FMT = '%(levelname)-8s [%(asctime)s] %(name)s: %(message)s'
class Formatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
super(Formatter, self).__init__(fmt or _DEFAULT_FMT, datefmt)
# A user-configurable function to convert the creation time to a tuple.
        # It's used by the Formatter.formatTime method and the default is time.localtime()
# We set it to convert time to a struct_time in UTC
self.converter = time.gmtime
def formatException(self, exc_info):
text = super(Formatter, self).formatException(exc_info)
# Prepend ! mark to every line
text = '\n'.join(('! %s' % line) for line in text.splitlines())
return text
|
Add sane log output formatter
|
Add sane log output formatter
|
Python
|
mit
|
rspivak/logsna
|
Add sane log output formatter
|
###############################################################################
#
# Copyright (c) 2012 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <[email protected]>'
import time
import logging
_DEFAULT_FMT = '%(levelname)-8s [%(asctime)s] %(name)s: %(message)s'
class Formatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
super(Formatter, self).__init__(fmt or _DEFAULT_FMT, datefmt)
# A user-configurable function to convert the creation time to a tuple.
        # It's used by the Formatter.formatTime method and the default is time.localtime()
# We set it to convert time to a struct_time in UTC
self.converter = time.gmtime
def formatException(self, exc_info):
text = super(Formatter, self).formatException(exc_info)
# Prepend ! mark to every line
text = '\n'.join(('! %s' % line) for line in text.splitlines())
return text
|
<commit_before><commit_msg>Add sane log output formatter<commit_after>
|
###############################################################################
#
# Copyright (c) 2012 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <[email protected]>'
import time
import logging
_DEFAULT_FMT = '%(levelname)-8s [%(asctime)s] %(name)s: %(message)s'
class Formatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
super(Formatter, self).__init__(fmt or _DEFAULT_FMT, datefmt)
# A user-configurable function to convert the creation time to a tuple.
        # It's used by the Formatter.formatTime method and the default is time.localtime()
# We set it to convert time to a struct_time in UTC
self.converter = time.gmtime
def formatException(self, exc_info):
text = super(Formatter, self).formatException(exc_info)
# Prepend ! mark to every line
text = '\n'.join(('! %s' % line) for line in text.splitlines())
return text
|
Add sane log output formatter###############################################################################
#
# Copyright (c) 2012 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <[email protected]>'
import time
import logging
_DEFAULT_FMT = '%(levelname)-8s [%(asctime)s] %(name)s: %(message)s'
class Formatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
super(Formatter, self).__init__(fmt or _DEFAULT_FMT, datefmt)
# A user-configurable function to convert the creation time to a tuple.
        # It's used by the Formatter.formatTime method and the default is time.localtime()
# We set it to convert time to a struct_time in UTC
self.converter = time.gmtime
def formatException(self, exc_info):
text = super(Formatter, self).formatException(exc_info)
# Prepend ! mark to every line
text = '\n'.join(('! %s' % line) for line in text.splitlines())
return text
|
<commit_before><commit_msg>Add sane log output formatter<commit_after>###############################################################################
#
# Copyright (c) 2012 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <[email protected]>'
import time
import logging
_DEFAULT_FMT = '%(levelname)-8s [%(asctime)s] %(name)s: %(message)s'
class Formatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None):
super(Formatter, self).__init__(fmt or _DEFAULT_FMT, datefmt)
# A user-configurable function to convert the creation time to a tuple.
        # It's used by the Formatter.formatTime method and the default is time.localtime()
# We set it to convert time to a struct_time in UTC
self.converter = time.gmtime
def formatException(self, exc_info):
text = super(Formatter, self).formatException(exc_info)
# Prepend ! mark to every line
text = '\n'.join(('! %s' % line) for line in text.splitlines())
return text
|
|
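Editor's note: a minimal usage sketch for the formatter in the record above, assuming it is importable as `logsna.Formatter` (as the file path suggests); the handler and logger names here are illustrative only.

# Editorial usage sketch: wire the formatter into the standard logging module.
import logging

from logsna import Formatter

handler = logging.StreamHandler()
handler.setFormatter(Formatter())        # UTC timestamps, default format string

log = logging.getLogger('myapp')
log.addHandler(handler)
log.setLevel(logging.INFO)

log.info('service started')
try:
    1 / 0
except ZeroDivisionError:
    log.exception('computation failed')  # traceback lines come out prefixed with '! '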
416dea771c5750044b99e8c8bfe0755feeb3ee71
|
astropy/vo/samp/constants.py
|
astropy/vo/samp/constants.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(os.path.join(DATA_DIR, 'astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
from ...utils.data import get_pkg_data_filename
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(get_pkg_data_filename('data/astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl
|
Make use of get_pkg_data_filename for icon
|
Make use of get_pkg_data_filename for icon
|
Python
|
bsd-3-clause
|
StuartLittlefair/astropy,StuartLittlefair/astropy,bsipocz/astropy,saimn/astropy,bsipocz/astropy,tbabej/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,AustereCuriosity/astropy,larrybradley/astropy,mhvk/astropy,stargaser/astropy,dhomeier/astropy,pllim/astropy,kelle/astropy,DougBurke/astropy,AustereCuriosity/astropy,dhomeier/astropy,saimn/astropy,tbabej/astropy,DougBurke/astropy,larrybradley/astropy,AustereCuriosity/astropy,mhvk/astropy,kelle/astropy,StuartLittlefair/astropy,MSeifert04/astropy,astropy/astropy,StuartLittlefair/astropy,lpsinger/astropy,DougBurke/astropy,saimn/astropy,mhvk/astropy,pllim/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,joergdietrich/astropy,stargaser/astropy,mhvk/astropy,AustereCuriosity/astropy,MSeifert04/astropy,funbaker/astropy,lpsinger/astropy,bsipocz/astropy,larrybradley/astropy,astropy/astropy,lpsinger/astropy,pllim/astropy,stargaser/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,tbabej/astropy,mhvk/astropy,tbabej/astropy,larrybradley/astropy,dhomeier/astropy,dhomeier/astropy,AustereCuriosity/astropy,joergdietrich/astropy,saimn/astropy,stargaser/astropy,funbaker/astropy,kelle/astropy,lpsinger/astropy,joergdietrich/astropy,funbaker/astropy,astropy/astropy,tbabej/astropy,StuartLittlefair/astropy,larrybradley/astropy,bsipocz/astropy,MSeifert04/astropy,pllim/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,funbaker/astropy,saimn/astropy,kelle/astropy,DougBurke/astropy,astropy/astropy,joergdietrich/astropy,astropy/astropy,pllim/astropy,kelle/astropy
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(os.path.join(DATA_DIR, 'astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del sslMake use of get_pkg_data_filename for icon
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
from ...utils.data import get_pkg_data_filename
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(get_pkg_data_filename('data/astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(os.path.join(DATA_DIR, 'astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl<commit_msg>Make use of get_pkg_data_filename for icon<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
from ...utils.data import get_pkg_data_filename
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(get_pkg_data_filename('data/astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(os.path.join(DATA_DIR, 'astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del sslMake use of get_pkg_data_filename for icon# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
from ...utils.data import get_pkg_data_filename
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(get_pkg_data_filename('data/astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(os.path.join(DATA_DIR, 'astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl<commit_msg>Make use of get_pkg_data_filename for icon<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Defines constants used in `astropy.vo.samp`."""
import os
from ...utils.data import get_pkg_data_filename
__all__ = ['SAMP_STATUS_OK', 'SAMP_STATUS_WARNING', 'SAMP_STATUS_ERROR',
'SAMP_HUB_SINGLE_INSTANCE', 'SAMP_HUB_MULTIPLE_INSTANCE',
'SAFE_MTYPES', 'SAMP_ICON']
__profile_version__ = "1.3"
#: General constant for samp.ok status string
SAMP_STATUS_OK = "samp.ok"
#: General constant for samp.warning status string
SAMP_STATUS_WARNING = "samp.warning"
#: General constant for samp.error status string
SAMP_STATUS_ERROR = "samp.error"
#: General constant to specify single instance Hub running mode
SAMP_HUB_SINGLE_INSTANCE = "single"
#: General constant to specify multiple instance Hub running mode
SAMP_HUB_MULTIPLE_INSTANCE = "multiple"
SAFE_MTYPES = ["samp.app.*", "samp.msg.progress", "table.*", "image.*",
"coord.*", "spectrum.*", "bibcode.*", "voresource.*"]
with open(get_pkg_data_filename('data/astropy_icon.png'), 'rb') as f:
SAMP_ICON = f.read()
try:
import ssl
except ImportError:
SSL_SUPPORT = False
else:
SSL_SUPPORT = True
del ssl
|
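Editor's note: the change above swaps a hand-rolled DATA_DIR join for astropy's `get_pkg_data_filename`, which resolves a data file relative to the calling package. The sketch below shows that lookup from outside the package; the explicit `package=` argument and the `astropy.samp` package name (the module was later renamed from `astropy.vo.samp`) are assumptions about current astropy releases, not part of the record.

# Editorial sketch: resolve a packaged data file by name instead of joining
# paths by hand; whether this exact data path still ships with astropy.samp
# in current releases is an assumption.
from astropy.utils.data import get_pkg_data_filename

icon_path = get_pkg_data_filename('data/astropy_icon.png', package='astropy.samp')
with open(icon_path, 'rb') as f:
    icon_bytes = f.read()
print('icon size:', len(icon_bytes), 'bytes')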
c3a432f217917de0261d690c289a4d578a292fe3
|
common/lib/xmodule/setup.py
|
common/lib/xmodule/setup.py
|
from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.seq_module:SequenceDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
|
from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.vertical_module:VerticalDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
|
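Editor's note: the setup.py above registers descriptor classes under the 'xmodule.v1' entry-point group, and this commit remaps the 'problemset' tag from SequenceDescriptor to VerticalDescriptor. The sketch below shows how such an entry point could be resolved at runtime with the classic pkg_resources API; the helper function is an assumption about usage, not edx-platform code.

# Editorial sketch: look up the descriptor class registered for a tag in the
# 'xmodule.v1' entry-point group.
import pkg_resources


def load_descriptor_class(tag, group='xmodule.v1'):
    """Return the descriptor class registered for `tag`, e.g. 'problemset'."""
    for entry_point in pkg_resources.iter_entry_points(group, tag):
        return entry_point.load()    # VerticalDescriptor after this commit
    raise KeyError('no descriptor registered for tag %r' % tag)

# Example (requires the XModule package to be installed):
# cls = load_descriptor_class('problemset')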
Make problemsets display as verticals rather than sequences
|
Make problemsets display as verticals rather than sequences
|
Python
|
agpl-3.0
|
ovnicraft/edx-platform,mjg2203/edx-platform-seas,nanolearning/edx-platform,mcgachey/edx-platform,wwj718/edx-platform,chauhanhardik/populo_2,cecep-edu/edx-platform,sudheerchintala/LearnEraPlatForm,jswope00/GAI,dkarakats/edx-platform,proversity-org/edx-platform,IONISx/edx-platform,JCBarahona/edX,angelapper/edx-platform,pdehaye/theming-edx-platform,benpatterson/edx-platform,kalebhartje/schoolboost,hamzehd/edx-platform,DefyVentures/edx-platform,cognitiveclass/edx-platform,xuxiao19910803/edx-platform,polimediaupv/edx-platform,openfun/edx-platform,UOMx/edx-platform,LearnEra/LearnEraPlaftform,pdehaye/theming-edx-platform,benpatterson/edx-platform,bdero/edx-platform,nttks/edx-platform,jamiefolsom/edx-platform,Semi-global/edx-platform,nanolearning/edx-platform,playm2mboy/edx-platform,ferabra/edx-platform,kmoocdev2/edx-platform,torchingloom/edx-platform,andyzsf/edx,franosincic/edx-platform,ubc/edx-platform,pelikanchik/edx-platform,OmarIthawi/edx-platform,miptliot/edx-platform,antoviaque/edx-platform,zerobatu/edx-platform,zerobatu/edx-platform,appliedx/edx-platform,jruiperezv/ANALYSE,BehavioralInsightsTeam/edx-platform,mitocw/edx-platform,amir-qayyum-khan/edx-platform,WatanabeYasumasa/edx-platform,zubair-arbi/edx-platform,cyanna/edx-platform,edry/edx-platform,pomegranited/edx-platform,xuxiao19910803/edx,msegado/edx-platform,fly19890211/edx-platform,dsajkl/123,MSOpenTech/edx-platform,jelugbo/tundex,proversity-org/edx-platform,nttks/jenkins-test,beacloudgenius/edx-platform,caesar2164/edx-platform,shubhdev/edxOnBaadal,eduNEXT/edunext-platform,SivilTaram/edx-platform,adoosii/edx-platform,ak2703/edx-platform,ahmadio/edx-platform,olexiim/edx-platform,pabloborrego93/edx-platform,nanolearningllc/edx-platform-cypress,jelugbo/tundex,deepsrijit1105/edx-platform,benpatterson/edx-platform,doismellburning/edx-platform,OmarIthawi/edx-platform,analyseuc3m/ANALYSE-v1,dsajkl/reqiop,alexthered/kienhoc-platform,PepperPD/edx-pepper-platform,hmcmooc/muddx-platform,proversity-org/edx-platform,mtlchun/edx,dsajkl/123,MakeHer/edx-platform,jbassen/edx-platform,adoosii/edx-platform,fintech-circle/edx-platform,motion2015/edx-platform,jswope00/griffinx,louyihua/edx-platform,dkarakats/edx-platform,raccoongang/edx-platform,nanolearningllc/edx-platform-cypress,shubhdev/openedx,a-parhom/edx-platform,ubc/edx-platform,UOMx/edx-platform,morpheby/levelup-by,analyseuc3m/ANALYSE-v1,ferabra/edx-platform,dcosentino/edx-platform,knehez/edx-platform,shubhdev/edx-platform,halvertoluke/edx-platform,devs1991/test_edx_docmode,UXE/local-edx,halvertoluke/edx-platform,abdoosh00/edraak,carsongee/edx-platform,motion2015/a3,jswope00/GAI,Livit/Livit.Learn.EdX,ZLLab-Mooc/edx-platform,tanmaykm/edx-platform,kalebhartje/schoolboost,mjg2203/edx-platform-seas,dkarakats/edx-platform,ZLLab-Mooc/edx-platform,zubair-arbi/edx-platform,pabloborrego93/edx-platform,adoosii/edx-platform,jazkarta/edx-platform-for-isc,kursitet/edx-platform,hastexo/edx-platform,xuxiao19910803/edx,playm2mboy/edx-platform,DNFcode/edx-platform,AkA84/edx-platform,ovnicraft/edx-platform,antonve/s4-project-mooc,devs1991/test_edx_docmode,ahmadiga/min_edx,olexiim/edx-platform,DNFcode/edx-platform,appsembler/edx-platform,alexthered/kienhoc-platform,vasyarv/edx-platform,morpheby/levelup-by,prarthitm/edxplatform,SravanthiSinha/edx-platform,jolyonb/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,torchingloom/edx-platform,Endika/edx-platform,pomegranited/edx-platform,eduNEXT/edx-platform,vismartltd/edx-platform,jazztpt/edx-platform,nagyistoce/edx-platform,Ayub-Khan/edx-platform,longmen21/ed
x-platform,jazztpt/edx-platform,eemirtekin/edx-platform,solashirai/edx-platform,jamiefolsom/edx-platform,kxliugang/edx-platform,rue89-tech/edx-platform,prarthitm/edxplatform,appliedx/edx-platform,fly19890211/edx-platform,synergeticsedx/deployment-wipro,motion2015/edx-platform,jruiperezv/ANALYSE,devs1991/test_edx_docmode,BehavioralInsightsTeam/edx-platform,playm2mboy/edx-platform,alu042/edx-platform,Semi-global/edx-platform,Kalyzee/edx-platform,mushtaqak/edx-platform,eemirtekin/edx-platform,abdoosh00/edraak,nanolearningllc/edx-platform-cypress-2,unicri/edx-platform,mcgachey/edx-platform,Kalyzee/edx-platform,xuxiao19910803/edx,xinjiguaike/edx-platform,EduPepperPDTesting/pepper2013-testing,kursitet/edx-platform,cognitiveclass/edx-platform,naresh21/synergetics-edx-platform,DefyVentures/edx-platform,Shrhawk/edx-platform,knehez/edx-platform,mahendra-r/edx-platform,ovnicraft/edx-platform,UOMx/edx-platform,apigee/edx-platform,ahmadio/edx-platform,JioEducation/edx-platform,ahmadio/edx-platform,jazkarta/edx-platform-for-isc,kalebhartje/schoolboost,hmcmooc/muddx-platform,dkarakats/edx-platform,chand3040/cloud_that,mjirayu/sit_academy,longmen21/edx-platform,praveen-pal/edx-platform,longmen21/edx-platform,teltek/edx-platform,jazkarta/edx-platform,Lektorium-LLC/edx-platform,hastexo/edx-platform,fintech-circle/edx-platform,olexiim/edx-platform,louyihua/edx-platform,utecuy/edx-platform,openfun/edx-platform,TsinghuaX/edx-platform,zhenzhai/edx-platform,raccoongang/edx-platform,morenopc/edx-platform,mahendra-r/edx-platform,cpennington/edx-platform,jamesblunt/edx-platform,andyzsf/edx,arifsetiawan/edx-platform,dsajkl/123,eemirtekin/edx-platform,nanolearningllc/edx-platform-cypress,zhenzhai/edx-platform,10clouds/edx-platform,zadgroup/edx-platform,synergeticsedx/deployment-wipro,10clouds/edx-platform,dsajkl/reqiop,nikolas/edx-platform,jazztpt/edx-platform,vikas1885/test1,atsolakid/edx-platform,angelapper/edx-platform,EduPepperPDTesting/pepper2013-testing,jzoldak/edx-platform,antoviaque/edx-platform,mjirayu/sit_academy,hamzehd/edx-platform,beacloudgenius/edx-platform,romain-li/edx-platform,rismalrv/edx-platform,praveen-pal/edx-platform,ahmadio/edx-platform,chrisndodge/edx-platform,chudaol/edx-platform,itsjeyd/edx-platform,DefyVentures/edx-platform,chauhanhardik/populo_2,edry/edx-platform,rhndg/openedx,shashank971/edx-platform,rhndg/openedx,chudaol/edx-platform,mcgachey/edx-platform,doganov/edx-platform,procangroup/edx-platform,xuxiao19910803/edx-platform,EduPepperPD/pepper2013,unicri/edx-platform,EduPepperPD/pepper2013,angelapper/edx-platform,syjeon/new_edx,simbs/edx-platform,romain-li/edx-platform,zubair-arbi/edx-platform,mitocw/edx-platform,Ayub-Khan/edx-platform,pelikanchik/edx-platform,bitifirefly/edx-platform,franosincic/edx-platform,cecep-edu/edx-platform,CredoReference/edx-platform,jonathan-beard/edx-platform,tiagochiavericosta/edx-platform,pelikanchik/edx-platform,y12uc231/edx-platform,don-github/edx-platform,fly19890211/edx-platform,doismellburning/edx-platform,pku9104038/edx-platform,stvstnfrd/edx-platform,philanthropy-u/edx-platform,rismalrv/edx-platform,edx-solutions/edx-platform,pabloborrego93/edx-platform,bdero/edx-platform,deepsrijit1105/edx-platform,UXE/local-edx,Unow/edx-platform,defance/edx-platform,DefyVentures/edx-platform,JCBarahona/edX,bitifirefly/edx-platform,zubair-arbi/edx-platform,AkA84/edx-platform,zofuthan/edx-platform,angelapper/edx-platform,RPI-OPENEDX/edx-platform,fintech-circle/edx-platform,nikolas/edx-platform,cognitiveclass/edx-platform,shashank971/edx-platform,cselis86/edx-platfo
rm,jjmiranda/edx-platform,procangroup/edx-platform,cselis86/edx-platform,dsajkl/123,MSOpenTech/edx-platform,mcgachey/edx-platform,eestay/edx-platform,arbrandes/edx-platform,morpheby/levelup-by,shashank971/edx-platform,ferabra/edx-platform,don-github/edx-platform,nanolearningllc/edx-platform-cypress-2,raccoongang/edx-platform,apigee/edx-platform,leansoft/edx-platform,ampax/edx-platform-backup,hmcmooc/muddx-platform,kalebhartje/schoolboost,defance/edx-platform,kmoocdev/edx-platform,waheedahmed/edx-platform,arbrandes/edx-platform,auferack08/edx-platform,defance/edx-platform,Endika/edx-platform,xuxiao19910803/edx,jazkarta/edx-platform-for-isc,sameetb-cuelogic/edx-platform-test,jelugbo/tundex,lduarte1991/edx-platform,zerobatu/edx-platform,msegado/edx-platform,IONISx/edx-platform,motion2015/a3,amir-qayyum-khan/edx-platform,antonve/s4-project-mooc,Shrhawk/edx-platform,inares/edx-platform,nagyistoce/edx-platform,jamesblunt/edx-platform,jswope00/griffinx,kamalx/edx-platform,chand3040/cloud_that,beni55/edx-platform,atsolakid/edx-platform,leansoft/edx-platform,nanolearningllc/edx-platform-cypress-2,wwj718/ANALYSE,proversity-org/edx-platform,edx-solutions/edx-platform,jbzdak/edx-platform,jzoldak/edx-platform,pomegranited/edx-platform,kxliugang/edx-platform,arifsetiawan/edx-platform,dcosentino/edx-platform,inares/edx-platform,SivilTaram/edx-platform,jruiperezv/ANALYSE,4eek/edx-platform,chudaol/edx-platform,kamalx/edx-platform,motion2015/edx-platform,mtlchun/edx,TeachAtTUM/edx-platform,10clouds/edx-platform,jazkarta/edx-platform,J861449197/edx-platform,chand3040/cloud_that,cpennington/edx-platform,nttks/edx-platform,Kalyzee/edx-platform,prarthitm/edxplatform,ampax/edx-platform,jswope00/griffinx,EduPepperPD/pepper2013,nanolearningllc/edx-platform-cypress,wwj718/ANALYSE,utecuy/edx-platform,kmoocdev2/edx-platform,Edraak/circleci-edx-platform,don-github/edx-platform,jonathan-beard/edx-platform,Livit/Livit.Learn.EdX,rationalAgent/edx-platform-custom,caesar2164/edx-platform,halvertoluke/edx-platform,morenopc/edx-platform,tanmaykm/edx-platform,jbassen/edx-platform,ahmadiga/min_edx,sameetb-cuelogic/edx-platform-test,Semi-global/edx-platform,rhndg/openedx,vasyarv/edx-platform,etzhou/edx-platform,louyihua/edx-platform,TsinghuaX/edx-platform,kamalx/edx-platform,Softmotions/edx-platform,B-MOOC/edx-platform,wwj718/edx-platform,mushtaqak/edx-platform,pomegranited/edx-platform,ak2703/edx-platform,iivic/BoiseStateX,jamiefolsom/edx-platform,mjg2203/edx-platform-seas,kmoocdev/edx-platform,itsjeyd/edx-platform,stvstnfrd/edx-platform,J861449197/edx-platform,vasyarv/edx-platform,ovnicraft/edx-platform,Shrhawk/edx-platform,edx/edx-platform,AkA84/edx-platform,jolyonb/edx-platform,Unow/edx-platform,nagyistoce/edx-platform,zerobatu/edx-platform,ESOedX/edx-platform,jazztpt/edx-platform,gymnasium/edx-platform,UOMx/edx-platform,UXE/local-edx,Softmotions/edx-platform,jazkarta/edx-platform-for-isc,Edraak/edx-platform,rismalrv/edx-platform,auferack08/edx-platform,Ayub-Khan/edx-platform,ampax/edx-platform-backup,deepsrijit1105/edx-platform,cselis86/edx-platform,eduNEXT/edunext-platform,pdehaye/theming-edx-platform,LearnEra/LearnEraPlaftform,simbs/edx-platform,SravanthiSinha/edx-platform,benpatterson/edx-platform,vikas1885/test1,chauhanhardik/populo,valtech-mooc/edx-platform,dsajkl/reqiop,dkarakats/edx-platform,sameetb-cuelogic/edx-platform-test,gsehub/edx-platform,devs1991/test_edx_docmode,benpatterson/edx-platform,mjirayu/sit_academy,Shrhawk/edx-platform,cpennington/edx-platform,a-parhom/edx-platform,eestay/edx-platform,fly19890211/ed
x-platform,ovnicraft/edx-platform,jamesblunt/edx-platform,carsongee/edx-platform,peterm-itr/edx-platform,B-MOOC/edx-platform,cyanna/edx-platform,Livit/Livit.Learn.EdX,Livit/Livit.Learn.EdX,vikas1885/test1,zadgroup/edx-platform,cpennington/edx-platform,hkawasaki/kawasaki-aio8-0,TeachAtTUM/edx-platform,utecuy/edx-platform,gsehub/edx-platform,MakeHer/edx-platform,shashank971/edx-platform,nttks/jenkins-test,gymnasium/edx-platform,openfun/edx-platform,EduPepperPDTesting/pepper2013-testing,vismartltd/edx-platform,xinjiguaike/edx-platform,AkA84/edx-platform,a-parhom/edx-platform,ampax/edx-platform,ZLLab-Mooc/edx-platform,TsinghuaX/edx-platform,appliedx/edx-platform,nagyistoce/edx-platform,martynovp/edx-platform,unicri/edx-platform,nttks/jenkins-test,romain-li/edx-platform,devs1991/test_edx_docmode,kxliugang/edx-platform,Lektorium-LLC/edx-platform,ahmadio/edx-platform,jazkarta/edx-platform,bdero/edx-platform,waheedahmed/edx-platform,fintech-circle/edx-platform,IONISx/edx-platform,zhenzhai/edx-platform,chauhanhardik/populo,marcore/edx-platform,kmoocdev/edx-platform,Edraak/edx-platform,pepeportela/edx-platform,nanolearningllc/edx-platform-cypress,ESOedX/edx-platform,zhenzhai/edx-platform,openfun/edx-platform,shubhdev/edx-platform,solashirai/edx-platform,auferack08/edx-platform,zofuthan/edx-platform,kursitet/edx-platform,jruiperezv/ANALYSE,bigdatauniversity/edx-platform,ak2703/edx-platform,valtech-mooc/edx-platform,jbzdak/edx-platform,jbassen/edx-platform,bigdatauniversity/edx-platform,pepeportela/edx-platform,polimediaupv/edx-platform,RPI-OPENEDX/edx-platform,dcosentino/edx-platform,nikolas/edx-platform,OmarIthawi/edx-platform,jelugbo/tundex,jswope00/griffinx,playm2mboy/edx-platform,chrisndodge/edx-platform,edx-solutions/edx-platform,Edraak/circleci-edx-platform,alu042/edx-platform,hkawasaki/kawasaki-aio8-1,tanmaykm/edx-platform,mahendra-r/edx-platform,ampax/edx-platform,TsinghuaX/edx-platform,mtlchun/edx,eduNEXT/edunext-platform,pdehaye/theming-edx-platform,RPI-OPENEDX/edx-platform,kamalx/edx-platform,eduNEXT/edx-platform,SivilTaram/edx-platform,leansoft/edx-platform,ahmedaljazzar/edx-platform,EduPepperPDTesting/pepper2013-testing,Unow/edx-platform,appsembler/edx-platform,eemirtekin/edx-platform,SivilTaram/edx-platform,shubhdev/edxOnBaadal,Kalyzee/edx-platform,teltek/edx-platform,xingyepei/edx-platform,cecep-edu/edx-platform,carsongee/edx-platform,zofuthan/edx-platform,Ayub-Khan/edx-platform,arifsetiawan/edx-platform,xinjiguaike/edx-platform,tanmaykm/edx-platform,Edraak/edx-platform,nanolearning/edx-platform,zadgroup/edx-platform,xingyepei/edx-platform,doganov/edx-platform,analyseuc3m/ANALYSE-v1,hkawasaki/kawasaki-aio8-0,eestay/edx-platform,ampax/edx-platform-backup,zofuthan/edx-platform,hkawasaki/kawasaki-aio8-0,martynovp/edx-platform,ampax/edx-platform-backup,PepperPD/edx-pepper-platform,hkawasaki/kawasaki-aio8-2,wwj718/edx-platform,syjeon/new_edx,shurihell/testasia,IONISx/edx-platform,halvertoluke/edx-platform,martynovp/edx-platform,carsongee/edx-platform,LICEF/edx-platform,valtech-mooc/edx-platform,valtech-mooc/edx-platform,DNFcode/edx-platform,shabab12/edx-platform,mahendra-r/edx-platform,kxliugang/edx-platform,MakeHer/edx-platform,UXE/local-edx,JCBarahona/edX,ZLLab-Mooc/edx-platform,peterm-itr/edx-platform,naresh21/synergetics-edx-platform,msegado/edx-platform,shabab12/edx-platform,LICEF/edx-platform,romain-li/edx-platform,jonathan-beard/edx-platform,playm2mboy/edx-platform,abdoosh00/edx-rtl-final,Endika/edx-platform,gymnasium/edx-platform,antoviaque/edx-platform,torchingloom/edx-platform,rue8
9-tech/edx-platform,PepperPD/edx-pepper-platform,ahmadiga/min_edx,xuxiao19910803/edx-platform,mtlchun/edx,itsjeyd/edx-platform,shubhdev/openedx,bigdatauniversity/edx-platform,jbassen/edx-platform,cyanna/edx-platform,etzhou/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,hamzehd/edx-platform,msegado/edx-platform,shurihell/testasia,pabloborrego93/edx-platform,motion2015/a3,EDUlib/edx-platform,mushtaqak/edx-platform,unicri/edx-platform,4eek/edx-platform,andyzsf/edx,hkawasaki/kawasaki-aio8-1,ak2703/edx-platform,olexiim/edx-platform,Edraak/edraak-platform,marcore/edx-platform,jswope00/griffinx,knehez/edx-platform,jzoldak/edx-platform,romain-li/edx-platform,DefyVentures/edx-platform,sameetb-cuelogic/edx-platform-test,arifsetiawan/edx-platform,MakeHer/edx-platform,ahmedaljazzar/edx-platform,dcosentino/edx-platform,shurihell/testasia,vikas1885/test1,jamesblunt/edx-platform,IndonesiaX/edx-platform,eduNEXT/edx-platform,LICEF/edx-platform,B-MOOC/edx-platform,mtlchun/edx,doganov/edx-platform,alu042/edx-platform,Softmotions/edx-platform,naresh21/synergetics-edx-platform,AkA84/edx-platform,jamiefolsom/edx-platform,jjmiranda/edx-platform,olexiim/edx-platform,pepeportela/edx-platform,wwj718/edx-platform,yokose-ks/edx-platform,jonathan-beard/edx-platform,prarthitm/edxplatform,Edraak/circleci-edx-platform,CourseTalk/edx-platform,ubc/edx-platform,iivic/BoiseStateX,stvstnfrd/edx-platform,motion2015/a3,IITBinterns13/edx-platform-dev,shubhdev/openedx,mitocw/edx-platform,vismartltd/edx-platform,miptliot/edx-platform,shurihell/testasia,BehavioralInsightsTeam/edx-platform,xuxiao19910803/edx-platform,edx/edx-platform,zhenzhai/edx-platform,franosincic/edx-platform,miptliot/edx-platform,martynovp/edx-platform,zadgroup/edx-platform,jbassen/edx-platform,edry/edx-platform,IITBinterns13/edx-platform-dev,amir-qayyum-khan/edx-platform,knehez/edx-platform,polimediaupv/edx-platform,beacloudgenius/edx-platform,shubhdev/openedx,Kalyzee/edx-platform,lduarte1991/edx-platform,devs1991/test_edx_docmode,y12uc231/edx-platform,simbs/edx-platform,Unow/edx-platform,CredoReference/edx-platform,vikas1885/test1,gsehub/edx-platform,beni55/edx-platform,MSOpenTech/edx-platform,kmoocdev/edx-platform,atsolakid/edx-platform,mbareta/edx-platform-ft,nanolearningllc/edx-platform-cypress-2,praveen-pal/edx-platform,yokose-ks/edx-platform,cselis86/edx-platform,franosincic/edx-platform,OmarIthawi/edx-platform,unicri/edx-platform,solashirai/edx-platform,chrisndodge/edx-platform,xingyepei/edx-platform,hamzehd/edx-platform,SravanthiSinha/edx-platform,Ayub-Khan/edx-platform,alu042/edx-platform,JCBarahona/edX,JioEducation/edx-platform,EDUlib/edx-platform,sudheerchintala/LearnEraPlatForm,waheedahmed/edx-platform,doganov/edx-platform,iivic/BoiseStateX,itsjeyd/edx-platform,yokose-ks/edx-platform,analyseuc3m/ANALYSE-v1,jamesblunt/edx-platform,mcgachey/edx-platform,beni55/edx-platform,arbrandes/edx-platform,chauhanhardik/populo_2,atsolakid/edx-platform,lduarte1991/edx-platform,xingyepei/edx-platform,alexthered/kienhoc-platform,rismalrv/edx-platform,tiagochiavericosta/edx-platform,bitifirefly/edx-platform,DNFcode/edx-platform,Edraak/edraak-platform,vasyarv/edx-platform,wwj718/ANALYSE,kursitet/edx-platform,praveen-pal/edx-platform,utecuy/edx-platform,LICEF/edx-platform,WatanabeYasumasa/edx-platform,kmoocdev2/edx-platform,JioEducation/edx-platform,don-github/edx-platform,hkawasaki/kawasaki-aio8-2,fly19890211/edx-platform,nttks/jenkins-test,tiagochiavericosta/edx-platform,jswope00/GAI,xinjiguaike/edx-platform,jazkarta/edx-platform-for-isc,jazkarta/edx-platf
orm,xuxiao19910803/edx,jazkarta/edx-platform,bigdatauniversity/edx-platform,hamzehd/edx-platform,torchingloom/edx-platform,ahmadiga/min_edx,nikolas/edx-platform,shabab12/edx-platform,marcore/edx-platform,bigdatauniversity/edx-platform,IndonesiaX/edx-platform,atsolakid/edx-platform,zubair-arbi/edx-platform,bitifirefly/edx-platform,procangroup/edx-platform,halvertoluke/edx-platform,mahendra-r/edx-platform,nanolearning/edx-platform,4eek/edx-platform,EDUlib/edx-platform,pku9104038/edx-platform,nikolas/edx-platform,beni55/edx-platform,simbs/edx-platform,ZLLab-Mooc/edx-platform,abdoosh00/edx-rtl-final,rismalrv/edx-platform,adoosii/edx-platform,ahmedaljazzar/edx-platform,ampax/edx-platform,Lektorium-LLC/edx-platform,valtech-mooc/edx-platform,adoosii/edx-platform,rue89-tech/edx-platform,rhndg/openedx,xinjiguaike/edx-platform,EduPepperPDTesting/pepper2013-testing,edry/edx-platform,jswope00/GAI,MSOpenTech/edx-platform,chauhanhardik/populo_2,don-github/edx-platform,mushtaqak/edx-platform,SivilTaram/edx-platform,doismellburning/edx-platform,shubhdev/edx-platform,DNFcode/edx-platform,SravanthiSinha/edx-platform,zadgroup/edx-platform,openfun/edx-platform,4eek/edx-platform,philanthropy-u/edx-platform,BehavioralInsightsTeam/edx-platform,MakeHer/edx-platform,jjmiranda/edx-platform,shurihell/testasia,solashirai/edx-platform,JioEducation/edx-platform,edry/edx-platform,mbareta/edx-platform-ft,shubhdev/openedx,rue89-tech/edx-platform,rationalAgent/edx-platform-custom,knehez/edx-platform,stvstnfrd/edx-platform,motion2015/edx-platform,nttks/edx-platform,synergeticsedx/deployment-wipro,Endika/edx-platform,doismellburning/edx-platform,zerobatu/edx-platform,inares/edx-platform,edx/edx-platform,syjeon/new_edx,longmen21/edx-platform,miptliot/edx-platform,dsajkl/123,Lektorium-LLC/edx-platform,yokose-ks/edx-platform,cyanna/edx-platform,arbrandes/edx-platform,msegado/edx-platform,nanolearning/edx-platform,mbareta/edx-platform-ft,jelugbo/tundex,y12uc231/edx-platform,apigee/edx-platform,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-2,beacloudgenius/edx-platform,Edraak/circleci-edx-platform,TeachAtTUM/edx-platform,nttks/edx-platform,edx/edx-platform,chauhanhardik/populo_2,motion2015/a3,EduPepperPDTesting/pepper2013-testing,chudaol/edx-platform,marcore/edx-platform,morenopc/edx-platform,antonve/s4-project-mooc,etzhou/edx-platform,shashank971/edx-platform,sudheerchintala/LearnEraPlatForm,hkawasaki/kawasaki-aio8-1,dsajkl/reqiop,apigee/edx-platform,bitifirefly/edx-platform,amir-qayyum-khan/edx-platform,LearnEra/LearnEraPlaftform,JCBarahona/edX,RPI-OPENEDX/edx-platform,rationalAgent/edx-platform-custom,devs1991/test_edx_docmode,vismartltd/edx-platform,caesar2164/edx-platform,pomegranited/edx-platform,jolyonb/edx-platform,cecep-edu/edx-platform,chrisndodge/edx-platform,IITBinterns13/edx-platform-dev,andyzsf/edx,B-MOOC/edx-platform,teltek/edx-platform,tiagochiavericosta/edx-platform,kursitet/edx-platform,appliedx/edx-platform,iivic/BoiseStateX,peterm-itr/edx-platform,abdoosh00/edx-rtl-final,inares/edx-platform,mitocw/edx-platform,syjeon/new_edx,vasyarv/edx-platform,chauhanhardik/populo,ESOedX/edx-platform,mjg2203/edx-platform-seas,B-MOOC/edx-platform,arifsetiawan/edx-platform,PepperPD/edx-pepper-platform,rhndg/openedx,EDUlib/edx-platform,polimediaupv/edx-platform,cyanna/edx-platform,jazztpt/edx-platform,nttks/jenkins-test,CredoReference/edx-platform,Softmotions/edx-platform,4eek/edx-platform,utecuy/edx-platform,J861449197/edx-platform,nagyistoce/edx-platform,hkawasaki/kawasaki-aio8-2,jbzdak/edx-platform,PepperPD/edx-pepper-plat
form,iivic/BoiseStateX,xingyepei/edx-platform,Semi-global/edx-platform,chudaol/edx-platform,appliedx/edx-platform,bdero/edx-platform,RPI-OPENEDX/edx-platform,yokose-ks/edx-platform,beacloudgenius/edx-platform,Edraak/edraak-platform,Stanford-Online/edx-platform,defance/edx-platform,ferabra/edx-platform,Edraak/edx-platform,raccoongang/edx-platform,shubhdev/edxOnBaadal,abdoosh00/edx-rtl-final,martynovp/edx-platform,leansoft/edx-platform,cecep-edu/edx-platform,alexthered/kienhoc-platform,tiagochiavericosta/edx-platform,abdoosh00/edraak,mushtaqak/edx-platform,chand3040/cloud_that,10clouds/edx-platform,IONISx/edx-platform,Edraak/circleci-edx-platform,waheedahmed/edx-platform,appsembler/edx-platform,antonve/s4-project-mooc,morpheby/levelup-by,sameetb-cuelogic/edx-platform-test,procangroup/edx-platform,y12uc231/edx-platform,polimediaupv/edx-platform,deepsrijit1105/edx-platform,nttks/edx-platform,mbareta/edx-platform-ft,auferack08/edx-platform,gymnasium/edx-platform,etzhou/edx-platform,J861449197/edx-platform,abdoosh00/edraak,wwj718/ANALYSE,kalebhartje/schoolboost,hmcmooc/muddx-platform,jbzdak/edx-platform,WatanabeYasumasa/edx-platform,teltek/edx-platform,leansoft/edx-platform,xuxiao19910803/edx-platform,hastexo/edx-platform,rationalAgent/edx-platform-custom,IITBinterns13/edx-platform-dev,EduPepperPD/pepper2013,nanolearningllc/edx-platform-cypress-2,kmoocdev/edx-platform,wwj718/edx-platform,antonve/s4-project-mooc,chauhanhardik/populo,CourseTalk/edx-platform,hkawasaki/kawasaki-aio8-0,Stanford-Online/edx-platform,louyihua/edx-platform,mjirayu/sit_academy,ubc/edx-platform,shabab12/edx-platform,doganov/edx-platform,zofuthan/edx-platform,lduarte1991/edx-platform,beni55/edx-platform,peterm-itr/edx-platform,shubhdev/edx-platform,MSOpenTech/edx-platform,hkawasaki/kawasaki-aio8-1,gsehub/edx-platform,jruiperezv/ANALYSE,eestay/edx-platform,mjirayu/sit_academy,jjmiranda/edx-platform,shubhdev/edx-platform,cognitiveclass/edx-platform,y12uc231/edx-platform,kamalx/edx-platform,solashirai/edx-platform,kmoocdev2/edx-platform,doismellburning/edx-platform,pku9104038/edx-platform,SravanthiSinha/edx-platform,pku9104038/edx-platform,caesar2164/edx-platform,Edraak/edraak-platform,naresh21/synergetics-edx-platform,jolyonb/edx-platform,jonathan-beard/edx-platform,appsembler/edx-platform,CourseTalk/edx-platform,philanthropy-u/edx-platform,pelikanchik/edx-platform,rationalAgent/edx-platform-custom,inares/edx-platform,EduPepperPD/pepper2013,kxliugang/edx-platform,etzhou/edx-platform,chand3040/cloud_that,longmen21/edx-platform,IndonesiaX/edx-platform,CredoReference/edx-platform,dcosentino/edx-platform,vismartltd/edx-platform,pepeportela/edx-platform,morenopc/edx-platform,Semi-global/edx-platform,Stanford-Online/edx-platform,eemirtekin/edx-platform,eestay/edx-platform,waheedahmed/edx-platform,jbzdak/edx-platform,alexthered/kienhoc-platform,ampax/edx-platform-backup,eduNEXT/edunext-platform,shubhdev/edxOnBaadal,J861449197/edx-platform,ahmedaljazzar/edx-platform,CourseTalk/edx-platform,cognitiveclass/edx-platform,IndonesiaX/edx-platform,jzoldak/edx-platform,morenopc/edx-platform,eduNEXT/edx-platform,edx-solutions/edx-platform,torchingloom/edx-platform,kmoocdev2/edx-platform,WatanabeYasumasa/edx-platform,philanthropy-u/edx-platform,shubhdev/edxOnBaadal,LearnEra/LearnEraPlaftform,rue89-tech/edx-platform,sudheerchintala/LearnEraPlatForm,a-parhom/edx-platform,synergeticsedx/deployment-wipro,motion2015/edx-platform,ubc/edx-platform,ferabra/edx-platform,ESOedX/edx-platform,LICEF/edx-platform,hastexo/edx-platform,Softmotions/edx-platform
,franosincic/edx-platform,IndonesiaX/edx-platform,TeachAtTUM/edx-platform,cselis86/edx-platform,Shrhawk/edx-platform,simbs/edx-platform,ak2703/edx-platform,wwj718/ANALYSE,ahmadiga/min_edx,Stanford-Online/edx-platform
|
from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.seq_module:SequenceDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
Make problemsets display as verticals rather than sequences
|
from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.vertical_module:VerticalDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.seq_module:SequenceDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
<commit_msg>Make problemsets display as verticals rather than sequences<commit_after>
|
from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.vertical_module:VerticalDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
|
from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.seq_module:SequenceDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
Make problemsets display as verticals rather than sequencesfrom setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.vertical_module:VerticalDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.seq_module:SequenceDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
<commit_msg>Make problemsets display as verticals rather than sequences<commit_after>from setuptools import setup, find_packages
setup(
name="XModule",
version="0.1",
packages=find_packages(),
install_requires=['distribute'],
package_data={
'': ['js/*']
},
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points
entry_points={
'xmodule.v1': [
"book = xmodule.translation_module:TranslateCustomTagDescriptor",
"chapter = xmodule.seq_module:SequenceDescriptor",
"course = xmodule.seq_module:SequenceDescriptor",
"customtag = xmodule.template_module:CustomTagDescriptor",
"discuss = xmodule.translation_module:TranslateCustomTagDescriptor",
"html = xmodule.html_module:HtmlDescriptor",
"image = xmodule.translation_module:TranslateCustomTagDescriptor",
"problem = xmodule.capa_module:CapaDescriptor",
"problemset = xmodule.vertical_module:VerticalDescriptor",
"section = xmodule.translation_module:SemanticSectionDescriptor",
"sequential = xmodule.seq_module:SequenceDescriptor",
"slides = xmodule.translation_module:TranslateCustomTagDescriptor",
"vertical = xmodule.vertical_module:VerticalDescriptor",
"video = xmodule.video_module:VideoDescriptor",
"videodev = xmodule.translation_module:TranslateCustomTagDescriptor",
"videosequence = xmodule.seq_module:SequenceDescriptor",
]
}
)
|
745ec6f3dd227cc00c3db0d100b005fb6fd4d903
|
test/on_yubikey/test_cli_openpgp.py
|
test/on_yubikey/test_cli_openpgp.py
|
import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
|
import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def setUp(self):
ykman_cli('openpgp', 'reset', '-f')
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
|
Reset OpenPGP applet before each test
|
Reset OpenPGP applet before each test
|
Python
|
bsd-2-clause
|
Yubico/yubikey-manager,Yubico/yubikey-manager
|
import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
Reset OpenPGP applet before each test
|
import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def setUp(self):
ykman_cli('openpgp', 'reset', '-f')
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
|
<commit_before>import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
<commit_msg>Reset OpenPGP applet before each test<commit_after>
|
import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def setUp(self):
ykman_cli('openpgp', 'reset', '-f')
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
|
import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
Reset OpenPGP applet before each testimport unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def setUp(self):
ykman_cli('openpgp', 'reset', '-f')
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
|
<commit_before>import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
<commit_msg>Reset OpenPGP applet before each test<commit_after>import unittest
from ykman.util import TRANSPORT
from .util import (DestructiveYubikeyTestCase, missing_mode, ykman_cli)
@unittest.skipIf(*missing_mode(TRANSPORT.CCID))
class TestOpenPGP(DestructiveYubikeyTestCase):
def setUp(self):
ykman_cli('openpgp', 'reset', '-f')
def test_openpgp_info(self):
output = ykman_cli('openpgp', 'info')
self.assertIn('OpenPGP version:', output)
def test_openpgp_reset(self):
output = ykman_cli('openpgp', 'reset', '-f')
self.assertIn(
'Success! All data has been cleared and default PINs are set.',
output)
|
f1b0aa70569052fae2677989f265e6619df16f17
|
config/deployed/settings.py
|
config/deployed/settings.py
|
#!/usr/bin/env python
from config.settings import *
import logging
log = logging.getLogger('settings')
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
log.warn('EMAIL_HOST not configured!')
|
#!/usr/bin/env python
from config.settings import *
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
raise ValueError('EMAIL_HOST not configured!')
|
Throw error instead of logging.
|
Throw error instead of logging.
|
Python
|
mit
|
ibrahimcesar/panda,pandaproject/panda,newsapps/panda,NUKnightLab/panda,pandaproject/panda,pandaproject/panda,datadesk/panda,pandaproject/panda,newsapps/panda,datadesk/panda,PalmBeachPost/panda,PalmBeachPost/panda,ibrahimcesar/panda,PalmBeachPost/panda,newsapps/panda,datadesk/panda,PalmBeachPost/panda,ibrahimcesar/panda,NUKnightLab/panda,datadesk/panda,NUKnightLab/panda,newsapps/panda,PalmBeachPost/panda,pandaproject/panda,ibrahimcesar/panda,ibrahimcesar/panda,NUKnightLab/panda,datadesk/panda
|
#!/usr/bin/env python
from config.settings import *
import logging
log = logging.getLogger('settings')
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
log.warn('EMAIL_HOST not configured!')
Throw error instead of logging.
|
#!/usr/bin/env python
from config.settings import *
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
raise ValueError('EMAIL_HOST not configured!')
|
<commit_before>#!/usr/bin/env python
from config.settings import *
import logging
log = logging.getLogger('settings')
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
log.warn('EMAIL_HOST not configured!')
<commit_msg>Throw error instead of logging.<commit_after>
|
#!/usr/bin/env python
from config.settings import *
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
raise ValueError('EMAIL_HOST not configured!')
|
#!/usr/bin/env python
from config.settings import *
import logging
log = logging.getLogger('settings')
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
log.warn('EMAIL_HOST not configured!')
Throw error instead of logging.#!/usr/bin/env python
from config.settings import *
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
raise ValueError('EMAIL_HOST not configured!')
|
<commit_before>#!/usr/bin/env python
from config.settings import *
import logging
log = logging.getLogger('settings')
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
log.warn('EMAIL_HOST not configured!')
<commit_msg>Throw error instead of logging.<commit_after>#!/usr/bin/env python
from config.settings import *
SETTINGS = 'deployed'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Static media
STATIC_ROOT = '/mnt/media'
# Uploads
MEDIA_ROOT = '/mnt/panda'
# Django-compressor
COMPRESS_ENABLED = True
if EMAIL_HOST == 'localhost':
raise ValueError('EMAIL_HOST not configured!')
|
29d41cf99f66aa075bda5fed6feb78cbb9ccdd74
|
tests/dojo_test.py
|
tests/dojo_test.py
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_addition_of_duplicate_room_names(self):
pass
|
Add test for duplicate rooms
|
Add test for duplicate rooms
|
Python
|
mit
|
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)Add test for duplicate rooms
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_addition_of_duplicate_room_names(self):
pass
|
<commit_before>import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)<commit_msg>Add test for duplicate rooms<commit_after>
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_addition_of_duplicate_room_names(self):
pass
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)Add test for duplicate roomsimport unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_addition_of_duplicate_room_names(self):
pass
|
<commit_before>import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)<commit_msg>Add test for duplicate rooms<commit_after>import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def setUp(self):
self.dojo = Dojo()
self.test_office = self.dojo.create_room("office", "test")
self.test_living_space = self.dojo.create_room("living_space", "test living space")
def test_create_room_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
blue_office = self.dojo.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
initial_room_count = len(self.dojo.all_rooms)
offices = self.dojo.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(self.dojo.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_addition_of_duplicate_room_names(self):
pass
|
beb224f23403e0f7e4676aca156420420fe3653f
|
tests/dojo_test.py
|
tests/dojo_test.py
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)
def test_person_has_been_assigned_office(self):
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
self.assertTrue(self.dojo.all_people[-1].has_office)
|
Add test to check that person has been given office
|
Add test to check that person has been given office
|
Python
|
mit
|
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)Add test to check that person has been given office
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)
def test_person_has_been_assigned_office(self):
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
self.assertTrue(self.dojo.all_people[-1].has_office)
|
<commit_before>import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)<commit_msg>Add test to check that person has been given office<commit_after>
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)
def test_person_has_been_assigned_office(self):
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
self.assertTrue(self.dojo.all_people[-1].has_office)
|
import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)Add test to check that person has been given officeimport unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)
def test_person_has_been_assigned_office(self):
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
self.assertTrue(self.dojo.all_people[-1].has_office)
|
<commit_before>import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)<commit_msg>Add test to check that person has been given office<commit_after>import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1)
def test_person_has_been_assigned_office(self):
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
self.assertTrue(self.dojo.all_people[-1].has_office)
|
2b1e60a9910561de5a71e83d042b845f6be0bc73
|
__init__.py
|
__init__.py
|
from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run():
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
|
from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
|
Allow run argument to avoid @every template
|
Allow run argument to avoid @every template
|
Python
|
bsd-2-clause
|
furbrain/tingbot-python
|
from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run():
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
Allow run argument to avoid @every template
|
from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
|
<commit_before>from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run():
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
<commit_msg>Allow run argument to avoid @every template<commit_after>
|
from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
|
from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run():
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
Allow run argument to avoid @every templatefrom . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
|
<commit_before>from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run():
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
<commit_msg>Allow run argument to avoid @every template<commit_after>from . import platform_specific, input
from .graphics import screen
from .run_loop import main_run_loop, every
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_wait_callback(input.check_for_quit_event)
main_run_loop.add_after_action_callback(screen.after_loop)
main_run_loop.run()
|
0d42aa0158bb4f13098bdb5341bead9b1d7c686a
|
__init__.py
|
__init__.py
|
from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
|
from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
if (not 'created' in kwargs) or (not kwargs['created']):
return
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
|
Make sure to send email only when a new user is created
|
Make sure to send email only when a new user is created
|
Python
|
agpl-3.0
|
kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu
|
from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
Make sure to send email only when a new user is created
|
from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
if (not 'created' in kwargs) or (not kwargs['created']):
return
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
|
<commit_before>from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
<commit_msg>Make sure to send email only when a new user is created<commit_after>
|
from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
if (not 'created' in kwargs) or (not kwargs['created']):
return
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
|
from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
Make sure to send email only when a new user is createdfrom django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
if (not 'created' in kwargs) or (not kwargs['created']):
return
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
|
<commit_before>from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
<commit_msg>Make sure to send email only when a new user is created<commit_after>from django.core.mail import mail_managers
from django.dispatch import dispatcher
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.contrib.comments.signals import comment_was_posted
from kamu.comments.models import KamuComment
import settings
def comment_notification(sender, comment, request, **kwargs):
subject = 'New comment on %s' % str(comment.content_object)
msg = u'Comment from: %s (%s)\n\n' % (comment.user_name, request.META['REMOTE_ADDR'])
msg += u'Comment text:\n\n%s\n' % comment.comment
mail_managers(subject, msg, fail_silently=True)
comment_was_posted.connect(comment_notification, sender=KamuComment)
def user_notification(sender, instance, **kwargs):
if (not 'created' in kwargs) or (not kwargs['created']):
return
user = instance
subject = u"New user '%s' created" % (user.username)
msg = u"Email '%s'\n" % (user.email)
mail_managers(subject, msg, fail_silently=True)
post_save.connect(user_notification, sender=User)
|
7c3edfb8971331c0058ce6426e10239f57cbfc97
|
app.py
|
app.py
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
sources = {
"bbc": "bbc-news",
"cnn": "cnn",
"hackernews": "hacker-news"
}
def create_link(source):
if source in sources.keys():
return f"https://newsapi.org/v1/articles?source={sources[source]}&sortBy=top&apiKey={app.config['API_KEY']}"
@app.route("/")
@app.route("/<source>")
def index(source="bbc"):
r = requests.get(create_link(source))
return render_template("index.html", articles=r.json().get("articles"), source=source)
if __name__ == "__main__":
app.run()
|
Create dynamic routing for supported sources.
|
Create dynamic routing for supported sources.
|
Python
|
mit
|
alchermd/headlines,alchermd/headlines
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()Create dynamic routing for supported sources.
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
sources = {
"bbc": "bbc-news",
"cnn": "cnn",
"hackernews": "hacker-news"
}
def create_link(source):
if source in sources.keys():
return f"https://newsapi.org/v1/articles?source={sources[source]}&sortBy=top&apiKey={app.config['API_KEY']}"
@app.route("/")
@app.route("/<source>")
def index(source="bbc"):
r = requests.get(create_link(source))
return render_template("index.html", articles=r.json().get("articles"), source=source)
if __name__ == "__main__":
app.run()
|
<commit_before>import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()<commit_msg>Create dynamic routing for supported sources.<commit_after>
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
sources = {
"bbc": "bbc-news",
"cnn": "cnn",
"hackernews": "hacker-news"
}
def create_link(source):
if source in sources.keys():
return f"https://newsapi.org/v1/articles?source={sources[source]}&sortBy=top&apiKey={app.config['API_KEY']}"
@app.route("/")
@app.route("/<source>")
def index(source="bbc"):
r = requests.get(create_link(source))
return render_template("index.html", articles=r.json().get("articles"), source=source)
if __name__ == "__main__":
app.run()
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()Create dynamic routing for supported sources.import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
sources = {
"bbc": "bbc-news",
"cnn": "cnn",
"hackernews": "hacker-news"
}
def create_link(source):
if source in sources.keys():
return f"https://newsapi.org/v1/articles?source={sources[source]}&sortBy=top&apiKey={app.config['API_KEY']}"
@app.route("/")
@app.route("/<source>")
def index(source="bbc"):
r = requests.get(create_link(source))
return render_template("index.html", articles=r.json().get("articles"), source=source)
if __name__ == "__main__":
app.run()
|
<commit_before>import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()<commit_msg>Create dynamic routing for supported sources.<commit_after>import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
sources = {
"bbc": "bbc-news",
"cnn": "cnn",
"hackernews": "hacker-news"
}
def create_link(source):
if source in sources.keys():
return f"https://newsapi.org/v1/articles?source={sources[source]}&sortBy=top&apiKey={app.config['API_KEY']}"
@app.route("/")
@app.route("/<source>")
def index(source="bbc"):
r = requests.get(create_link(source))
return render_template("index.html", articles=r.json().get("articles"), source=source)
if __name__ == "__main__":
app.run()
|
6c53778132eeba03acbca718d76ad703615fadc6
|
troposphere/kms.py
|
troposphere/kms.py
|
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
|
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'EnableKeyRotation': (boolean, False),
'Enabled': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeySpec': (basestring, False),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
|
Update KMS per 2020-11-19 changes
|
Update KMS per 2020-11-19 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
Update KMS per 2020-11-19 changes
|
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'EnableKeyRotation': (boolean, False),
'Enabled': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeySpec': (basestring, False),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
<commit_msg>Update KMS per 2020-11-19 changes<commit_after>
|
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'EnableKeyRotation': (boolean, False),
'Enabled': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeySpec': (basestring, False),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
|
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
Update KMS per 2020-11-19 changes# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'EnableKeyRotation': (boolean, False),
'Enabled': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeySpec': (basestring, False),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
<commit_msg>Update KMS per 2020-11-19 changes<commit_after># Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .compat import policytypes
from .validators import boolean, integer_range, key_usage_type
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'EnableKeyRotation': (boolean, False),
'Enabled': (boolean, False),
'KeyPolicy': (policytypes, True),
'KeySpec': (basestring, False),
'KeyUsage': (key_usage_type, False),
'PendingWindowInDays': (integer_range(7, 30), False),
'Tags': ((Tags, list), False)
}
|
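The row above adds the 'KeySpec' property and the 7-30 day validator for 'PendingWindowInDays' to the troposphere Key resource. A minimal usage sketch follows, assuming only that the troposphere package is installed; the logical ID, key policy, and property values are illustrative placeholders, not taken from the commit.

from troposphere import Template
from troposphere.kms import Key

template = Template()
template.add_resource(Key(
    "ExampleKey",
    Description="Illustrative symmetric CMK",
    Enabled=True,
    EnableKeyRotation=True,
    KeySpec="SYMMETRIC_DEFAULT",      # property added by this change
    KeyUsage="ENCRYPT_DECRYPT",       # checked by the key_usage_type validator
    PendingWindowInDays=7,            # must fall in the 7-30 range
    KeyPolicy={"Version": "2012-10-17", "Statement": []},  # placeholder policy
))
print(template.to_json())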
31ea46e1ece2174bd5d16e2234576c4ca28a054d
|
pywikibot/families/wikia_family.py
|
pywikibot/families/wikia_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
Update a version number from trunk r9016
|
Update a version number from trunk r9016
|
Python
|
mit
|
azatoth/pywikipedia
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
Update a version number from trunk r9016
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
<commit_msg>Update a version number from trunk r9016<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
Update a version number from trunk r9016# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
<commit_msg>Update a version number from trunk r9016<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
|
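Beyond the version bump itself, the Family subclass above is just a bundle of per-site overrides. The sketch below is a plain-Python simplification, assuming nothing about pywikibot's real resolver, of how those overrides combine into the endpoint the framework ends up calling.

class Family(object):
    # Mirrors the overrides in the row above; no pywikibot runtime required.
    def hostname(self, code): return u'www.wikia.com'
    def version(self, code): return "1.16.2"
    def scriptpath(self, code): return ''
    def apipath(self, code): return '/api.php'

fam = Family()
code = u'wikia'
# Simplified endpoint assembly (the real client also handles protocol, login paths, etc.)
print('http://%s%s%s (MediaWiki %s)' % (
    fam.hostname(code), fam.scriptpath(code), fam.apipath(code), fam.version(code)))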
ad7e93fa74054e3d962e34807f5d04acd719df33
|
website/search_migration/migrate.py
|
website/search_migration/migrate.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
return i + 1 # Started counting from 0
def migrate_users():
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
return i + 1 # Started counting from 0
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
logger.info("Nodes migrated: {}".format(migrate_nodes()))
logger.info("Users migrated: {}".format(migrate_users()))
ctx.pop()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
logger.info('Nodes migrated: {}'.format(i + 1))
def migrate_users():
n_iter = 0
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
n_iter += 1
logger.info('Users iterated: {0}\nUsers migrated: {1}'.format(i + 1, n_iter))
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
migrate_nodes()
migrate_users()
ctx.pop()
if __name__ == '__main__':
main()
|
Add additional logging for users'
|
Add additional logging for users'
|
Python
|
apache-2.0
|
KAsante95/osf.io,hmoco/osf.io,petermalcolm/osf.io,amyshi188/osf.io,rdhyee/osf.io,samanehsan/osf.io,GaryKriebel/osf.io,mluo613/osf.io,ticklemepierce/osf.io,jnayak1/osf.io,GaryKriebel/osf.io,bdyetton/prettychart,mfraezz/osf.io,GaryKriebel/osf.io,ticklemepierce/osf.io,caneruguz/osf.io,crcresearch/osf.io,abought/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,caseyrygt/osf.io,baylee-d/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,arpitar/osf.io,felliott/osf.io,GageGaskins/osf.io,jinluyuan/osf.io,reinaH/osf.io,billyhunt/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,kushG/osf.io,kch8qx/osf.io,kushG/osf.io,lyndsysimon/osf.io,dplorimer/osf,kwierman/osf.io,himanshuo/osf.io,dplorimer/osf,emetsger/osf.io,dplorimer/osf,kwierman/osf.io,barbour-em/osf.io,Nesiehr/osf.io,wearpants/osf.io,sloria/osf.io,chennan47/osf.io,cosenal/osf.io,binoculars/osf.io,cldershem/osf.io,adlius/osf.io,TomHeatwole/osf.io,zkraime/osf.io,caseyrygt/osf.io,laurenrevere/osf.io,leb2dg/osf.io,chrisseto/osf.io,revanthkolli/osf.io,jnayak1/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,cldershem/osf.io,KAsante95/osf.io,laurenrevere/osf.io,emetsger/osf.io,Johnetordoff/osf.io,bdyetton/prettychart,doublebits/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,DanielSBrown/osf.io,jeffreyliu3230/osf.io,erinspace/osf.io,hmoco/osf.io,leb2dg/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,jolene-esposito/osf.io,mluke93/osf.io,jeffreyliu3230/osf.io,HarryRybacki/osf.io,hmoco/osf.io,wearpants/osf.io,cwisecarver/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,icereval/osf.io,caseyrollins/osf.io,arpitar/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,wearpants/osf.io,HarryRybacki/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,Ghalko/osf.io,jmcarp/osf.io,kch8qx/osf.io,leb2dg/osf.io,KAsante95/osf.io,caseyrollins/osf.io,doublebits/osf.io,acshi/osf.io,HarryRybacki/osf.io,njantrania/osf.io,saradbowman/osf.io,lamdnhan/osf.io,acshi/osf.io,arpitar/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,bdyetton/prettychart,jeffreyliu3230/osf.io,sbt9uc/osf.io,adlius/osf.io,alexschiller/osf.io,SSJohns/osf.io,cwisecarver/osf.io,mfraezz/osf.io,ckc6cz/osf.io,mluke93/osf.io,mluo613/osf.io,brianjgeiger/osf.io,jinluyuan/osf.io,sbt9uc/osf.io,kch8qx/osf.io,samchrisinger/osf.io,barbour-em/osf.io,mattclark/osf.io,zamattiac/osf.io,petermalcolm/osf.io,ticklemepierce/osf.io,fabianvf/osf.io,brandonPurvis/osf.io,lamdnhan/osf.io,monikagrabowska/osf.io,binoculars/osf.io,fabianvf/osf.io,fabianvf/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,adlius/osf.io,samchrisinger/osf.io,alexschiller/osf.io,kushG/osf.io,samanehsan/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,RomanZWang/osf.io,mluke93/osf.io,barbour-em/osf.io,arpitar/osf.io,reinaH/osf.io,ckc6cz/osf.io,SSJohns/osf.io,Nesiehr/osf.io,revanthkolli/osf.io,kushG/osf.io,binoculars/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,fabianvf/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,amyshi188/osf.io,TomBaxter/osf.io,mattclark/osf.io,sbt9uc/osf.io,Ghalko/osf.io,abought/osf.io,felliott/osf.io,doublebits/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,caneruguz/osf.io,sbt9uc/osf.io,cwisecarver/osf.io,aaxelb/osf.io,sloria/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,RomanZWang/osf.io,zkraime/osf.io,billyhunt/osf.io,acshi/osf.io,MerlinZhang/osf.io,monikagrabowska/osf.io,lamdnhan/osf.io,sloria/osf.io,erinspace/osf.io,hmoco/osf.io,ck
c6cz/osf.io,baylee-d/osf.io,emetsger/osf.io,jmcarp/osf.io,abought/osf.io,monikagrabowska/osf.io,njantrania/osf.io,cwisecarver/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,lyndsysimon/osf.io,Ghalko/osf.io,jeffreyliu3230/osf.io,zkraime/osf.io,ZobairAlijan/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,Nesiehr/osf.io,lamdnhan/osf.io,mluo613/osf.io,pattisdr/osf.io,zachjanicki/osf.io,HarryRybacki/osf.io,zkraime/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,KAsante95/osf.io,kwierman/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,revanthkolli/osf.io,barbour-em/osf.io,haoyuchen1992/osf.io,samchrisinger/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,himanshuo/osf.io,rdhyee/osf.io,samchrisinger/osf.io,njantrania/osf.io,jolene-esposito/osf.io,cslzchen/osf.io,jnayak1/osf.io,doublebits/osf.io,jolene-esposito/osf.io,aaxelb/osf.io,njantrania/osf.io,cldershem/osf.io,SSJohns/osf.io,petermalcolm/osf.io,lyndsysimon/osf.io,felliott/osf.io,icereval/osf.io,cosenal/osf.io,revanthkolli/osf.io,adlius/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,danielneis/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,reinaH/osf.io,leb2dg/osf.io,caneruguz/osf.io,MerlinZhang/osf.io,RomanZWang/osf.io,pattisdr/osf.io,emetsger/osf.io,samanehsan/osf.io,cosenal/osf.io,himanshuo/osf.io,billyhunt/osf.io,acshi/osf.io,danielneis/osf.io,rdhyee/osf.io,cosenal/osf.io,billyhunt/osf.io,abought/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,himanshuo/osf.io,kch8qx/osf.io,acshi/osf.io,wearpants/osf.io,brandonPurvis/osf.io,dplorimer/osf,TomBaxter/osf.io,Nesiehr/osf.io,amyshi188/osf.io,haoyuchen1992/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,crcresearch/osf.io,MerlinZhang/osf.io,GaryKriebel/osf.io,caseyrygt/osf.io,jmcarp/osf.io,cslzchen/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,chennan47/osf.io,jolene-esposito/osf.io,chennan47/osf.io,jnayak1/osf.io,zachjanicki/osf.io,chrisseto/osf.io,caseyrollins/osf.io,mattclark/osf.io,mluo613/osf.io,doublebits/osf.io,erinspace/osf.io,icereval/osf.io,brianjgeiger/osf.io,bdyetton/prettychart,rdhyee/osf.io,pattisdr/osf.io,mluo613/osf.io,ckc6cz/osf.io,jinluyuan/osf.io,GageGaskins/osf.io,danielneis/osf.io,jinluyuan/osf.io,Ghalko/osf.io,monikagrabowska/osf.io,samanehsan/osf.io,reinaH/osf.io,RomanZWang/osf.io,KAsante95/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,haoyuchen1992/osf.io,cldershem/osf.io
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
return i + 1 # Started counting from 0
def migrate_users():
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
return i + 1 # Started counting from 0
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
logger.info("Nodes migrated: {}".format(migrate_nodes()))
logger.info("Users migrated: {}".format(migrate_users()))
ctx.pop()
if __name__ == '__main__':
main()
Add additional logging for users'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
logger.info('Nodes migrated: {}'.format(i + 1))
def migrate_users():
n_iter = 0
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
n_iter += 1
logger.info('Users iterated: {0}\nUsers migrated: {1}'.format(i + 1, n_iter))
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
migrate_nodes()
migrate_users()
ctx.pop()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
return i + 1 # Started counting from 0
def migrate_users():
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
return i + 1 # Started counting from 0
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
logger.info("Nodes migrated: {}".format(migrate_nodes()))
logger.info("Users migrated: {}".format(migrate_users()))
ctx.pop()
if __name__ == '__main__':
main()
<commit_msg>Add additional logging for users'<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
logger.info('Nodes migrated: {}'.format(i + 1))
def migrate_users():
n_iter = 0
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
n_iter += 1
logger.info('Users iterated: {0}\nUsers migrated: {1}'.format(i + 1, n_iter))
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
migrate_nodes()
migrate_users()
ctx.pop()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
return i + 1 # Started counting from 0
def migrate_users():
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
return i + 1 # Started counting from 0
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
logger.info("Nodes migrated: {}".format(migrate_nodes()))
logger.info("Users migrated: {}".format(migrate_users()))
ctx.pop()
if __name__ == '__main__':
main()
Add additional logging for users'#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
logger.info('Nodes migrated: {}'.format(i + 1))
def migrate_users():
n_iter = 0
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
n_iter += 1
logger.info('Users iterated: {0}\nUsers migrated: {1}'.format(i + 1, n_iter))
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
migrate_nodes()
migrate_users()
ctx.pop()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
return i + 1 # Started counting from 0
def migrate_users():
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
return i + 1 # Started counting from 0
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
logger.info("Nodes migrated: {}".format(migrate_nodes()))
logger.info("Users migrated: {}".format(migrate_users()))
ctx.pop()
if __name__ == '__main__':
main()
<commit_msg>Add additional logging for users'<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Migration script for Search-enabled Models.'''
from __future__ import absolute_import
import logging
from modularodm.query.querydialect import DefaultQueryDialect as Q
from website.models import Node
from framework.auth import User
import website.search.search as search
from website.app import init_app
logger = logging.getLogger(__name__)
app = init_app("website.settings", set_backends=True, routes=True)
def migrate_nodes():
nodes = Node.find(Q('is_public', 'eq', True) & Q('is_deleted', 'eq', False))
for i, node in enumerate(nodes):
node.update_search()
logger.info('Nodes migrated: {}'.format(i + 1))
def migrate_users():
n_iter = 0
for i, user in enumerate(User.find()):
if user.is_active:
user.update_search()
n_iter += 1
logger.info('Users iterated: {0}\nUsers migrated: {1}'.format(i + 1, n_iter))
def main():
ctx = app.test_request_context()
ctx.push()
search.delete_all()
search.create_index()
migrate_nodes()
migrate_users()
ctx.pop()
if __name__ == '__main__':
main()
|
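The reworked migrate_users above distinguishes users iterated (everything enumerate sees) from users migrated (only those passing the is_active check). A standalone sketch of that counting pattern, using made-up dictionaries instead of OSF models:

users = [{"name": "a", "is_active": True},
         {"name": "b", "is_active": False},
         {"name": "c", "is_active": True}]

n_iter = 0
for i, user in enumerate(users):
    if user["is_active"]:
        n_iter += 1
print('Users iterated: {0}\nUsers migrated: {1}'.format(i + 1, n_iter))
# prints "Users iterated: 3" and "Users migrated: 2"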
61b5a3f2bdbde977fbc6dd0042209e0d67a53318
|
api/urls.py
|
api/urls.py
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
Fix for the api at root url.
|
Fix for the api at root url.
|
Python
|
mit
|
Puciek/elite-backend,Puciek/elite-backend
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]Fix for the api at root url.
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
<commit_before>from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]<commit_msg>Fix for the api at root url.<commit_after>
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]Fix for the api at root url.from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
<commit_before>from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]<commit_msg>Fix for the api at root url.<commit_after>from django.conf.urls import url, include
from rest_framework import routers
from api import views
router = routers.DefaultRouter()
router.register(r'categories', views.CategoryViewSet)
router.register(r'commodities', views.CommodityViewSet)
router.register(r'economies', views.EconomyViewSet)
router.register(r'factions', views.FactionViewSet)
router.register(r'governments', views.GovernmentViewSet)
router.register(r'allegiances', views.AllegianceViewSet)
router.register(r'states', views.StateViewSet)
router.register(r'securities', views.SecurityViewSet)
router.register(r'systems', views.SystemViewSet)
router.register(r'station_types', views.StationTypeViewSet)
router.register(r'stations', views.StationViewSet)
router.register(r'listings', views.ListingViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
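The one-character fix above works because Django matches URLconf regexes against the request path with its leading slash already stripped, so a pattern anchored at '^/' never matches the API root. A quick check with the standard re module (no Django needed) makes the difference visible:

import re

old_pattern, new_pattern = r'^/', r'^'
for path in ('', 'systems/', 'api-auth/login/'):
    print(repr(path),
          'old matches:', bool(re.match(old_pattern, path)),
          'new matches:', bool(re.match(new_pattern, path)))
# The old pattern matches none of these paths; the new one matches them all.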
305849d57cc6897c65b4e0996f70a21f1d873d25
|
awp/main.py
|
awp/main.py
|
#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
Clarify where packager.json validation error originates
|
Clarify where packager.json validation error originates
|
Python
|
mit
|
caleb531/alfred-workflow-packager
|
#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
Clarify where packager.json validation error originates
|
#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
<commit_msg>Clarify where packager.json validation error originates<commit_after>
|
#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
Clarify where packager.json validation error originates#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print(error.message)
if __name__ == '__main__':
main()
<commit_msg>Clarify where packager.json validation error originates<commit_after>#!/usr/bin/env python3
# coding=utf-8
import argparse
import json
import jsonschema
import awp.packager
import awp.validator
# Parse arguments given via command-line interface
def parse_cli_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--force', '-f', action='store_true',
help='forces the copying of all files and directories')
parser.add_argument(
'--export', '-e', nargs='?', const='', default=None,
help='exports the installed workflow to the local project directory')
parser.add_argument(
'--version', '-v',
help='the new version number to use for the workflow')
return parser.parse_args()
# Locate and parse the configuration for the utility
def get_utility_config():
with open('packager.json', 'r') as config_file:
return json.load(config_file)
def main():
cli_args = parse_cli_args()
config = get_utility_config()
try:
awp.validator.validate_config(config)
awp.packager.package_workflow(
config,
version=cli_args.version,
export_file=cli_args.export,
force=cli_args.force)
except jsonschema.exceptions.ValidationError as error:
print('awp (from packager.json): {}'.format(error.message))
if __name__ == '__main__':
main()
|
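To see where the new prefix surfaces, here is a self-contained sketch of the error path, assuming only that jsonschema is installed; the schema and config are invented stand-ins, not awp's real packager.json schema.

import jsonschema

schema = {"type": "object", "required": ["name"]}   # stand-in schema
config = {}                                         # invalid: missing "name"
try:
    jsonschema.validate(config, schema)
except jsonschema.exceptions.ValidationError as error:
    # Same formatting as main() above, so users can tell the message
    # refers to their packager.json rather than to awp itself.
    print('awp (from packager.json): {}'.format(error.message))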
261421a647fae9eb4df998c26740f7141a68c13d
|
chargehound/__init__.py
|
chargehound/__init__.py
|
from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 5
__all__ = [api_key, host, Disputes, timeout]
|
from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 60
__all__ = [api_key, host, Disputes, timeout]
|
Set timeout to 60 seconds
|
Set timeout to 60 seconds
|
Python
|
mit
|
chargehound/chargehound-python
|
from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 5
__all__ = [api_key, host, Disputes, timeout]
Set timeout to 60 seconds
|
from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 60
__all__ = [api_key, host, Disputes, timeout]
|
<commit_before>from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 5
__all__ = [api_key, host, Disputes, timeout]
<commit_msg>Set timeout to 60 seconds<commit_after>
|
from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 60
__all__ = [api_key, host, Disputes, timeout]
|
from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 5
__all__ = [api_key, host, Disputes, timeout]
Set timeout to 60 secondsfrom chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 60
__all__ = [api_key, host, Disputes, timeout]
|
<commit_before>from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 5
__all__ = [api_key, host, Disputes, timeout]
<commit_msg>Set timeout to 60 seconds<commit_after>from chargehound.resources import Disputes
api_key = None
host = 'api.chargehound.com'
base_path = '/v1/'
timeout = 60
__all__ = [api_key, host, Disputes, timeout]
|
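Since timeout is a plain module-level setting, callers can still inspect or tighten it after import. A hedged sketch, assuming only that the chargehound package is installed; nothing below relies on how the library consumes the value internally.

import chargehound

chargehound.api_key = 'test_placeholder_key'   # illustrative credential
print(chargehound.timeout)                     # 60 after this change
chargehound.timeout = 10                       # per-process override if needed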
82c95e2fcb1d3879ac9b935c7c9b883c42acf26a
|
trombi/__init__.py
|
trombi/__init__.py
|
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from trombi.client import *
|
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
|
Remove the fully qualified module reference 'trombi.client'
|
Remove the fully qualified module reference 'trombi.client'
If there happens to be more than one version of trombi on the system
(such as stable vs testing) the one in the PYTHONPATH that gets
encountered will be silently loaded when specifically loading the
module __init__ file for the other client. Now using the relative
'from .client import *'.
|
Python
|
mit
|
inoi/trombi
|
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from trombi.client import *
Remove the fully qualified module reference 'trombi.client'
If there happens to be more than one version of trombi on the system
(such as stable vs testing) the one in the PYTHONPATH that gets
encountered will be silently loaded when specifically loading the
module __init__ file for the other client. Now using the relative
'from .client import *'.
|
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
|
<commit_before># Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from trombi.client import *
<commit_msg>Remove the fully qualified module reference 'trombi.client'
If there happens to be more than one version of trombi on the system
(such as stable vs testing) the one in the PYTHONPATH that gets
encountered will be silently loaded when specifically loading the
module __init__ file for the other client. Now using the relative
'from .client import *'.<commit_after>
|
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
|
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from trombi.client import *
Remove the fully qualified module reference 'trombi.client'
If there happens to be more than one version of trombi on the system
(such as stable vs testing) the one in the PYTHONPATH that gets
encountered will be silently loaded when specifically loading the
module __init__ file for the other client. Now using the relative
'from .client import *'.# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
|
<commit_before># Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from trombi.client import *
<commit_msg>Remove the fully qualified module reference 'trombi.client'
If there happens to be more than one version of trombi on the system
(such as stable vs testing) the one in the PYTHONPATH that gets
encountered will be silently loaded when specifically loading the
module __init__ file for the other client. Now using the relative
'from .client import *'.<commit_after># Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
|
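An illustrative sketch of the mechanism the commit message above describes, using a hypothetical package name (mypackage) rather than trombi itself: a relative import in a package's __init__.py always re-exports the sibling module of the copy actually being imported, while the absolute form is resolved through sys.path and can silently pick up a different installed version.

# mypackage/__init__.py  (hypothetical layout, for illustration only)
# Relative form: always binds to the client.py sitting next to this file,
# no matter which other copies of the package are on the PYTHONPATH.
from .client import *

# Absolute form: resolved via sys.path, so another installed copy of
# "mypackage" earlier on the path would be loaded instead.
# from mypackage.client import *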
d629e7e1bb24e6ee7a0224b4929d538a23afaa43
|
commands.py
|
commands.py
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), ("username", amp.String())]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
Fix syntax problem in SendToUser.arguments
|
Fix syntax problem in SendToUser.arguments
|
Python
|
mit
|
dripton/ampchat
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
Fix syntax problem in SendToUser.arguments
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), ("username", amp.String())]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
<commit_before>from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
<commit_msg>Fix syntax problem in SendToUser.arguments<commit_after>
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), ("username", amp.String())]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
Fix syntax problem in SendToUser.argumentsfrom twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), ("username", amp.String())]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
<commit_before>from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
<commit_msg>Fix syntax problem in SendToUser.arguments<commit_after>from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), ("username", amp.String())]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
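For illustration (not part of the record above): Twisted AMP expects the arguments attribute to be a list of (name, argument-type) pairs, which is exactly the shape the fix restores for SendToUser. A minimal command definition in that shape:

from twisted.protocols import amp

class SendToUser(amp.Command):
    # Each entry is a ("name", amp.Argument) pair; the flat form
    # [..., "username", amp.String()] in the old code breaks that contract.
    arguments = [("message", amp.String()),
                 ("username", amp.String())]
    response = []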
ad7507f795f465425e72fb6821115e395046b84d
|
pyshtools/shio/yilm_index_vector.py
|
pyshtools/shio/yilm_index_vector.py
|
def YilmIndexVector(i, l, m):
"""
Compute the index of an 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of an 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient cilm[0,:,:], and 2 corresponds
to the sine coefficient cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, angular order m and i
(1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
return l**2 + (i - 1) * l + m
|
def YilmIndexVector(i, l, m):
"""
Compute the index of a 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of a 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient Ylm = cilm[0,:,:], and 2
corresponds to the sine coefficient Yl,-m = cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order, which must be greater or equal to zero.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, (positive) angular order
m and i (1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
if l < 0:
raise ValueError('The spherical harmonic degree must be positive. '
'Input value is {:s}'.format(repr(l)))
if m < 0:
raise ValueError('The angular order must be positive. '
'Input value is {:s}'.format(repr(m)))
if m >= l:
raise ValueError('The angular order must be less than or equal to '
'the spherical harmonic degree. Input degree is {:s}.'
' Input order is {:s}.'.format(repr(l), repr(m)))
return l**2 + (i - 1) * l + m
|
Add error checks to YilmIndexVector (and update docs)
|
Add error checks to YilmIndexVector (and update docs)
|
Python
|
bsd-3-clause
|
SHTOOLS/SHTOOLS,MarkWieczorek/SHTOOLS,MarkWieczorek/SHTOOLS,SHTOOLS/SHTOOLS
|
def YilmIndexVector(i, l, m):
"""
Compute the index of an 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of an 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient cilm[0,:,:], and 2 corresponds
to the sine coefficient cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, angular order m and i
(1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
return l**2 + (i - 1) * l + m
Add error checks to YilmIndexVector (and update docs)
|
def YilmIndexVector(i, l, m):
"""
Compute the index of a 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of a 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient Ylm = cilm[0,:,:], and 2
corresponds to the sine coefficient Yl,-m = cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order, which must be greater or equal to zero.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, (positive) angular order
m and i (1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
if l < 0:
raise ValueError('The spherical harmonic degree must be positive. '
'Input value is {:s}'.format(repr(l)))
if m < 0:
raise ValueError('The angular order must be positive. '
'Input value is {:s}'.format(repr(m)))
if m >= l:
raise ValueError('The angular order must be less than or equal to '
'the spherical harmonic degree. Input degree is {:s}.'
' Input order is {:s}.'.format(repr(l), repr(m)))
return l**2 + (i - 1) * l + m
|
<commit_before>def YilmIndexVector(i, l, m):
"""
Compute the index of an 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of an 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient cilm[0,:,:], and 2 corresponds
to the sine coefficient cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, angular order m and i
(1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
return l**2 + (i - 1) * l + m
<commit_msg>Add error checks to YilmIndexVector (and update docs)<commit_after>
|
def YilmIndexVector(i, l, m):
"""
Compute the index of a 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of a 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient Ylm = cilm[0,:,:], and 2
corresponds to the sine coefficient Yl,-m = cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order, which must be greater or equal to zero.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, (positive) angular order
m and i (1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
if l < 0:
raise ValueError('The spherical harmonic degree must be positive. '
'Input value is {:s}'.format(repr(l)))
if m < 0:
raise ValueError('The angular order must be positive. '
'Input value is {:s}'.format(repr(m)))
if m >= l:
raise ValueError('The angular order must be less than or equal to '
'the spherical harmonic degree. Input degree is {:s}.'
' Input order is {:s}.'.format(repr(l), repr(m)))
return l**2 + (i - 1) * l + m
|
def YilmIndexVector(i, l, m):
"""
Compute the index of an 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of an 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient cilm[0,:,:], and 2 corresponds
to the sine coefficient cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, angular order m and i
(1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
return l**2 + (i - 1) * l + m
Add error checks to YilmIndexVector (and update docs)def YilmIndexVector(i, l, m):
"""
Compute the index of a 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of a 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient Ylm = cilm[0,:,:], and 2
corresponds to the sine coefficient Yl,-m = cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order, which must be greater or equal to zero.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, (positive) angular order
m and i (1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
if l < 0:
raise ValueError('The spherical harmonic degree must be positive. '
'Input value is {:s}'.format(repr(l)))
if m < 0:
raise ValueError('The angular order must be positive. '
'Input value is {:s}'.format(repr(m)))
if m >= l:
raise ValueError('The angular order must be less than or equal to '
'the spherical harmonic degree. Input degree is {:s}.'
' Input order is {:s}.'.format(repr(l), repr(m)))
return l**2 + (i - 1) * l + m
|
<commit_before>def YilmIndexVector(i, l, m):
"""
Compute the index of an 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of an 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient cilm[0,:,:], and 2 corresponds
to the sine coefficient cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, angular order m and i
(1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
return l**2 + (i - 1) * l + m
<commit_msg>Add error checks to YilmIndexVector (and update docs)<commit_after>def YilmIndexVector(i, l, m):
"""
Compute the index of a 1D array of spherical harmonic coefficients
corresponding to i, l, and m.
Usage
-----
index = YilmIndexVector (i, l, m)
Returns
-------
index : integer
Index of a 1D array of spherical harmonic coefficients corresponding
to i, l, and m.
Parameters
----------
i : integer
1 corresponds to the cosine coefficient Ylm = cilm[0,:,:], and 2
corresponds to the sine coefficient Yl,-m = cilm[1,:,:].
l : integer
The spherical harmonic degree.
m : integer
The angular order, which must be greater or equal to zero.
Notes
-----
YilmIndexVector will calculate the index of a 1D vector of spherical
harmonic coefficients corresponding to degree l, (positive) angular order
m and i (1 = cosine, 2 = sine). The index is given by l**2+(i-1)*l+m.
"""
if l < 0:
raise ValueError('The spherical harmonic degree must be positive. '
'Input value is {:s}'.format(repr(l)))
if m < 0:
raise ValueError('The angular order must be positive. '
'Input value is {:s}'.format(repr(m)))
if m >= l:
raise ValueError('The angular order must be less than or equal to '
'the spherical harmonic degree. Input degree is {:s}.'
' Input order is {:s}.'.format(repr(l), repr(m)))
return l**2 + (i - 1) * l + m
|
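A quick numeric check of the 1D index formula used above, index = l**2 + (i - 1)*l + m. The docstring allows angular orders up to and including m == l, while the guard added in the commit rejects m >= l; the sketch below only exercises the formula itself.

def yilm_index(i, l, m):
    # Same formula as in the record: for each degree the cosine terms (i=1)
    # come first, followed by the sine terms (i=2).
    return l**2 + (i - 1) * l + m

assert yilm_index(1, 0, 0) == 0   # Y_{0,0} cosine coefficient
assert yilm_index(1, 2, 1) == 5   # Y_{2,1} cosine coefficient
assert yilm_index(2, 2, 1) == 7   # Y_{2,-1} sine coefficient
assert yilm_index(1, 2, 2) == 6   # m == l is a valid order for degree 2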
f1e1df825b69c33913096af1cb6e20b7d2db72ce
|
scrapi/harvesters/pubmedcentral.py
|
scrapi/harvesters/pubmedcentral.py
|
"""
Harvester of pubmed for the SHARE notification service
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
|
"""
Harvester of PubMed Central for the SHARE notification service
Example API call: http://www.pubmedcentral.nih.gov/oai/oai.cgi?verb=ListRecords&metadataPrefix=oai_dc&from=2015-04-13&until=2015-04-14
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
|
Add API call to top docstring
|
Add API call to top docstring
|
Python
|
apache-2.0
|
CenterForOpenScience/scrapi,mehanig/scrapi,icereval/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,alexgarciac/scrapi,fabianvf/scrapi,felliott/scrapi,jeffreyliu3230/scrapi,felliott/scrapi,ostwald/scrapi,erinspace/scrapi,fabianvf/scrapi,erinspace/scrapi
|
"""
Harvester of pubmed for the SHARE notification service
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
Add API call to top docstring
|
"""
Harvester of PubMed Central for the SHARE notification service
Example API call: http://www.pubmedcentral.nih.gov/oai/oai.cgi?verb=ListRecords&metadataPrefix=oai_dc&from=2015-04-13&until=2015-04-14
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
|
<commit_before>"""
Harvester of pubmed for the SHARE notification service
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
<commit_msg>Add API call to top docstring<commit_after>
|
"""
Harvester of PubMed Central for the SHARE notification service
Example API call: http://www.pubmedcentral.nih.gov/oai/oai.cgi?verb=ListRecords&metadataPrefix=oai_dc&from=2015-04-13&until=2015-04-14
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
|
"""
Harvester of pubmed for the SHARE notification service
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
Add API call to top docstring"""
Harvester of PubMed Central for the SHARE notification service
Example API call: http://www.pubmedcentral.nih.gov/oai/oai.cgi?verb=ListRecords&metadataPrefix=oai_dc&from=2015-04-13&until=2015-04-14
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
|
<commit_before>"""
Harvester of pubmed for the SHARE notification service
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
<commit_msg>Add API call to top docstring<commit_after>"""
Harvester of PubMed Central for the SHARE notification service
Example API call: http://www.pubmedcentral.nih.gov/oai/oai.cgi?verb=ListRecords&metadataPrefix=oai_dc&from=2015-04-13&until=2015-04-14
"""
from __future__ import unicode_literals
from scrapi.base import schemas
from scrapi.base import helpers
from scrapi.base import OAIHarvester
def oai_extract_url_pubmed(identifiers):
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for item in identifiers:
try:
found_url = helpers.URL_REGEX.search(item).group()
if 'viewcontent' not in found_url and '/pubmed/' in found_url:
return found_url.decode('utf-8')
except AttributeError:
continue
class PubMedHarvester(OAIHarvester):
short_name = 'pubmedcentral'
long_name = 'PubMed Central'
url = 'http://www.ncbi.nlm.nih.gov/pmc/'
schema = helpers.updated_schema(
schemas.OAISCHEMA,
{
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_url_pubmed)
}
}
)
base_url = 'http://www.pubmedcentral.nih.gov/oai/oai.cgi'
property_list = [
'type', 'source', 'publisher', 'rights',
'format', 'setSpec', 'date', 'identifier'
]
|
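For illustration only: the example API call in the docstring can be reproduced with the standard requests package. The endpoint and parameters are taken verbatim from the harvester above; the date range is simply the one used in the docstring example.

import requests

params = {
    "verb": "ListRecords",
    "metadataPrefix": "oai_dc",
    "from": "2015-04-13",
    "until": "2015-04-14",
}
response = requests.get("http://www.pubmedcentral.nih.gov/oai/oai.cgi", params=params)
print(response.status_code)   # 200 on success
print(response.text[:200])    # start of the OAI-PMH XML listing the records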
631f9edec1574054ef5612b652b94397af141d7a
|
tests/test_rule.py
|
tests/test_rule.py
|
from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
|
from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
|
Add a PriceRule test if a condition is not met.
|
Add a PriceRule test if a condition is not met.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
Add a PriceRule test if a condition is not met.
|
from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
|
<commit_before>from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
<commit_msg>Add a PriceRule test if a condition is not met.<commit_after>
|
from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
|
from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
Add a PriceRule test if a condition is not met.from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
|
<commit_before>from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
<commit_msg>Add a PriceRule test if a condition is not met.<commit_after>from datetime import datetime
from unittest import TestCase
from rule import PriceRule
from stock import Stock
class TestPriceRule(TestCase):
@classmethod
def setUpClass(cls):
goog = Stock("GOOG")
goog.update(datetime(2014, 2, 10), 11)
cls.exchange = {"GOOG": goog}
def test_a_PriceRule_matches_when_it_meets_the_condition(self):
"""Tests if true is returned when an exchange matches a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price > 10)
self.assertTrue(rule.matches(self.exchange))
def test_a_PriceRule_is_False_if_the_condition_is_not_met(self):
"""Tests if false is returned when an exchange does not match a rule.
"""
rule = PriceRule("GOOG", lambda stock: stock.price < 10)
self.assertFalse(rule.matches(self.exchange))
|
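rule.py itself is not part of the record, so the following is only a minimal sketch of a PriceRule that would satisfy both tests; the attribute names symbol and condition are assumptions inferred from the test code, not the project's actual implementation.

class PriceRule:
    """Hypothetical minimal rule: matches when `condition` holds for the
    named stock in the exchange dictionary (assumed shape, see note above)."""

    def __init__(self, symbol, condition):
        self.symbol = symbol        # e.g. "GOOG"
        self.condition = condition  # callable taking a Stock, returning bool

    def matches(self, exchange):
        stock = exchange.get(self.symbol)
        return self.condition(stock) if stock is not None else False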
2c7621143a9d110ebb1ea5dc7884f2c21e2786b5
|
microgear/cache.py
|
microgear/cache.py
|
import os
import json
import sys
def get_item(key):
try:
return json.loads(open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
if os.path.isfile(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key)):
os.remove(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key))
|
import os
import json
import sys
CURRENT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
def get_item(key):
"""Return content in cached file in JSON format"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
try:
return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
"""Write JSON content from value argument to cached file and return"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
"""Delete cached file if present"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
if os.path.isfile(CACHED_KEY_FILE):
os.remove(CACHED_KEY_FILE)
|
Add docstring to function and refactor some code for clarification
|
Add docstring to function and refactor some code for clarification
|
Python
|
isc
|
netpieio/microgear-python
|
import os
import json
import sys
def get_item(key):
try:
return json.loads(open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
if os.path.isfile(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key)):
os.remove(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key))
Add docstring to function and refactor some code for clarification
|
import os
import json
import sys
CURRENT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
def get_item(key):
"""Return content in cached file in JSON format"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
try:
return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
"""Write JSON content from value argument to cached file and return"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
"""Delete cached file if present"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
if os.path.isfile(CACHED_KEY_FILE):
os.remove(CACHED_KEY_FILE)
|
<commit_before>import os
import json
import sys
def get_item(key):
try:
return json.loads(open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
if os.path.isfile(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key)):
os.remove(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key))
<commit_msg>Add docstring to function and refactor some code for clarification<commit_after>
|
import os
import json
import sys
CURRENT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
def get_item(key):
"""Return content in cached file in JSON format"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
try:
return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
"""Write JSON content from value argument to cached file and return"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
"""Delete cached file if present"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
if os.path.isfile(CACHED_KEY_FILE):
os.remove(CACHED_KEY_FILE)
|
import os
import json
import sys
def get_item(key):
try:
return json.loads(open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
if os.path.isfile(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key)):
os.remove(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key))
Add docstring to function and refactor some code for clarificationimport os
import json
import sys
CURRENT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
def get_item(key):
"""Return content in cached file in JSON format"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
try:
return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
"""Write JSON content from value argument to cached file and return"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
"""Delete cached file if present"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
if os.path.isfile(CACHED_KEY_FILE):
os.remove(CACHED_KEY_FILE)
|
<commit_before>import os
import json
import sys
def get_item(key):
try:
return json.loads(open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
open(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key), "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
if os.path.isfile(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key)):
os.remove(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])),key))
<commit_msg>Add docstring to function and refactor some code for clarification<commit_after>import os
import json
import sys
CURRENT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
def get_item(key):
"""Return content in cached file in JSON format"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
try:
return json.loads(open(CACHED_KEY_FILE, "rb").read().decode('UTF-8'))["_"]
except (IOError, ValueError):
return None
def set_item(key,value):
"""Write JSON content from value argument to cached file and return"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
open(CACHED_KEY_FILE, "wb").write(json.dumps({"_": value}).encode('UTF-8'))
return value
def delete_item(key):
"""Delete cached file if present"""
CACHED_KEY_FILE = os.path.join(CURRENT_DIR, key)
if os.path.isfile(CACHED_KEY_FILE):
os.remove(CACHED_KEY_FILE)
|
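A hedged variant of the helpers above, shown only to illustrate one further cleanup the commit stops short of: wrapping the file access in with blocks so the handles are closed deterministically. Behaviour is otherwise the same as the committed code.

import json
import os
import sys

CURRENT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))

def get_item(key):
    """Return the cached value for key, or None if the file is missing or invalid."""
    try:
        with open(os.path.join(CURRENT_DIR, key), "rb") as cached:
            return json.loads(cached.read().decode("UTF-8"))["_"]
    except (IOError, ValueError):
        return None

def set_item(key, value):
    """Write value to the cached file for key and return it."""
    with open(os.path.join(CURRENT_DIR, key), "wb") as cached:
        cached.write(json.dumps({"_": value}).encode("UTF-8"))
    return value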
8b669c8e242bb3a66527edb004ea6feab8258168
|
scripts/lib/get_old_dict_values.py
|
scripts/lib/get_old_dict_values.py
|
from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0] if diff[key][0] != KEYNOTFOUNDIN1 else None for key in diff}
|
from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0]
if diff[key][0] != KEYNOTFOUNDIN1
else None
for key in diff}
|
Expand an object comprehension onto several lines
|
Expand an object comprehension onto several lines
|
Python
|
mit
|
StoDevX/course-data-tools,StoDevX/course-data-tools
|
from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0] if diff[key][0] != KEYNOTFOUNDIN1 else None for key in diff}
Expand an object comprehension onto several lines
|
from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0]
if diff[key][0] != KEYNOTFOUNDIN1
else None
for key in diff}
|
<commit_before>from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0] if diff[key][0] != KEYNOTFOUNDIN1 else None for key in diff}
<commit_msg>Expand an object comprehension onto several lines<commit_after>
|
from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0]
if diff[key][0] != KEYNOTFOUNDIN1
else None
for key in diff}
|
from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0] if diff[key][0] != KEYNOTFOUNDIN1 else None for key in diff}
Expand an object comprehension onto several linesfrom .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0]
if diff[key][0] != KEYNOTFOUNDIN1
else None
for key in diff}
|
<commit_before>from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0] if diff[key][0] != KEYNOTFOUNDIN1 else None for key in diff}
<commit_msg>Expand an object comprehension onto several lines<commit_after>from .KEYNOTFOUND import KEYNOTFOUNDIN1
from .dict_diff import dict_diff
def get_old_dict_values(old, new):
# Returns the "old" value for two dicts.
diff = dict_diff(old, new)
return {key: diff[key][0]
if diff[key][0] != KEYNOTFOUNDIN1
else None
for key in diff}
|
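The expanded comprehension is equivalent to the explicit loop below. This sketch only assumes what the comprehension itself already relies on, namely that dict_diff returns a mapping whose values are (old, new) pairs and that KEYNOTFOUNDIN1 marks keys absent from the first dict; it reuses the module's own imports shown above rather than being standalone.

def get_old_dict_values_explicit(old, new):
    # Same behaviour as the comprehension, spelled out step by step.
    diff = dict_diff(old, new)
    result = {}
    for key in diff:
        old_value = diff[key][0]
        result[key] = old_value if old_value != KEYNOTFOUNDIN1 else None
    return result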
562fa35a036a43526b55546d97490b3f36001a18
|
robotpy_ext/misc/periodic_filter.py
|
robotpy_ext/misc/periodic_filter.py
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
self.logger.warn('Uh oh, this shouldn't have happened...')
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
Create example usage. Rename bypass_level
|
Create example usage. Rename bypass_level
|
Python
|
bsd-3-clause
|
robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
Create example usage. Rename bypass_level
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
            self.logger.warn("Uh oh, this shouldn't have happened...")
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
<commit_before>import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
<commit_msg>Create example usage. Rename bypass_level<commit_after>
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
            self.logger.warn("Uh oh, this shouldn't have happened...")
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
Create example usage. Rename bypass_level
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
            self.logger.warn("Uh oh, this shouldn't have happened...")
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
<commit_before>import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
<commit_msg>Create example usage. Rename bypass_level<commit_after>import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
            self.logger.warn("Uh oh, this shouldn't have happened...")
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
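A minimal usage sketch for the filter above, assuming the robotpy-wpilib-utilities package providing robotpy_ext.misc.periodic_filter is installed; the logger name and timings are placeholders.

import logging
import time
from robotpy_ext.misc.periodic_filter import PeriodicFilter  # module path from this commit

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("drivetrain")

# Let INFO messages through at most once every 2 seconds; WARNING and above always pass.
logger.addFilter(PeriodicFilter(2, bypass_level=logging.WARNING))

for i in range(10):
    logger.info("loop %d", i)         # throttled to roughly one line per 2 seconds
    logger.warning("always printed")  # bypasses the periodic throttle
    time.sleep(0.5)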
ef72be28dc83ff2c73335c6eb13135cab8affe53
|
troposphere/sso.py
|
troposphere/sso.py
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 18.6.0
from . import AWSObject
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (basestring, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
|
# Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 25.0.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class AccessControlAttributeValueSourceList(AWSProperty):
props = {
'AccessControlAttributeValueSourceList': ([basestring], False),
}
class AccessControlAttributeValue(AWSProperty):
props = {
'Source': (AccessControlAttributeValueSourceList, True),
}
class AccessControlAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (AccessControlAttributeValue, True),
}
class InstanceAccessControlAttributeConfiguration(AWSObject):
resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration"
props = {
'AccessControlAttributes': ([AccessControlAttribute], False),
'InstanceAccessControlAttributeConfiguration': (dict, False),
'InstanceArn': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (dict, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
|
Update SSO per 2020-12-18 changes
|
Update SSO per 2020-12-18 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 18.6.0
from . import AWSObject
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (basestring, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
Update SSO per 2020-12-18 changes
|
# Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 25.0.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class AccessControlAttributeValueSourceList(AWSProperty):
props = {
'AccessControlAttributeValueSourceList': ([basestring], False),
}
class AccessControlAttributeValue(AWSProperty):
props = {
'Source': (AccessControlAttributeValueSourceList, True),
}
class AccessControlAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (AccessControlAttributeValue, True),
}
class InstanceAccessControlAttributeConfiguration(AWSObject):
resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration"
props = {
'AccessControlAttributes': ([AccessControlAttribute], False),
'InstanceAccessControlAttributeConfiguration': (dict, False),
'InstanceArn': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (dict, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
|
<commit_before># Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 18.6.0
from . import AWSObject
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (basestring, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
<commit_msg>Update SSO per 2020-12-18 changes<commit_after>
|
# Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 25.0.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class AccessControlAttributeValueSourceList(AWSProperty):
props = {
'AccessControlAttributeValueSourceList': ([basestring], False),
}
class AccessControlAttributeValue(AWSProperty):
props = {
'Source': (AccessControlAttributeValueSourceList, True),
}
class AccessControlAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (AccessControlAttributeValue, True),
}
class InstanceAccessControlAttributeConfiguration(AWSObject):
resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration"
props = {
'AccessControlAttributes': ([AccessControlAttribute], False),
'InstanceAccessControlAttributeConfiguration': (dict, False),
'InstanceArn': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (dict, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 18.6.0
from . import AWSObject
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (basestring, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
Update SSO per 2020-12-18 changes
# Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 25.0.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class AccessControlAttributeValueSourceList(AWSProperty):
props = {
'AccessControlAttributeValueSourceList': ([basestring], False),
}
class AccessControlAttributeValue(AWSProperty):
props = {
'Source': (AccessControlAttributeValueSourceList, True),
}
class AccessControlAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (AccessControlAttributeValue, True),
}
class InstanceAccessControlAttributeConfiguration(AWSObject):
resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration"
props = {
'AccessControlAttributes': ([AccessControlAttribute], False),
'InstanceAccessControlAttributeConfiguration': (dict, False),
'InstanceArn': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (dict, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
|
<commit_before># Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 18.6.0
from . import AWSObject
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (basestring, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
<commit_msg>Update SSO per 2020-12-18 changes<commit_after># Copyright (c) 2012-2021, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 25.0.0
from . import AWSObject
from . import AWSProperty
from troposphere import Tags
class Assignment(AWSObject):
resource_type = "AWS::SSO::Assignment"
props = {
'InstanceArn': (basestring, True),
'PermissionSetArn': (basestring, True),
'PrincipalId': (basestring, True),
'PrincipalType': (basestring, True),
'TargetId': (basestring, True),
'TargetType': (basestring, True),
}
class AccessControlAttributeValueSourceList(AWSProperty):
props = {
'AccessControlAttributeValueSourceList': ([basestring], False),
}
class AccessControlAttributeValue(AWSProperty):
props = {
'Source': (AccessControlAttributeValueSourceList, True),
}
class AccessControlAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (AccessControlAttributeValue, True),
}
class InstanceAccessControlAttributeConfiguration(AWSObject):
resource_type = "AWS::SSO::InstanceAccessControlAttributeConfiguration"
props = {
'AccessControlAttributes': ([AccessControlAttribute], False),
'InstanceAccessControlAttributeConfiguration': (dict, False),
'InstanceArn': (basestring, True),
}
class PermissionSet(AWSObject):
resource_type = "AWS::SSO::PermissionSet"
props = {
'Description': (basestring, False),
'InlinePolicy': (dict, False),
'InstanceArn': (basestring, True),
'ManagedPolicies': ([basestring], False),
'Name': (basestring, True),
'RelayStateType': (basestring, False),
'SessionDuration': (basestring, False),
'Tags': (Tags, False),
}
|
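A small usage sketch of the updated PermissionSet resource; it assumes a troposphere release that already ships these SSO classes, and the ARN, policy, and tag values below are placeholders rather than real data.

from troposphere import Tags, Template
from troposphere.sso import PermissionSet

t = Template()
t.add_resource(PermissionSet(
    "AdminPermissionSet",
    Name="AdminAccess",
    InstanceArn="arn:aws:sso:::instance/ssoins-EXAMPLE",
    # InlinePolicy is now a dict (it was a plain string before this change)
    InlinePolicy={
        "Version": "2012-10-17",
        "Statement": [{"Effect": "Allow", "Action": "*", "Resource": "*"}],
    },
    SessionDuration="PT8H",
    Tags=Tags(team="platform"),
))

print(t.to_json())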
7c3a3283b3da0c01da012bb823d781036d1847b6
|
packages/syft/src/syft/core/node/common/node_table/node_route.py
|
packages/syft/src/syft/core/node/common/node_table/node_route.py
|
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255))
is_vpn = Column(Boolean(), default=False)
|
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255), default="")
is_vpn = Column(Boolean(), default=False)
vpn_endpoint = Column(String(255), default="")
vpn_key = Column(String(255), default="")
|
ADD vpn_endpoint and vpn_key columns
|
ADD vpn_endpoint and vpn_key columns
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255))
is_vpn = Column(Boolean(), default=False)
ADD vpn_endpoint and vpn_key columns
|
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255), default="")
is_vpn = Column(Boolean(), default=False)
vpn_endpoint = Column(String(255), default="")
vpn_key = Column(String(255), default="")
|
<commit_before># third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255))
is_vpn = Column(Boolean(), default=False)
<commit_msg>ADD vpn_endpoint and vpn_key columns<commit_after>
|
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255), default="")
is_vpn = Column(Boolean(), default=False)
vpn_endpoint = Column(String(255), default="")
vpn_key = Column(String(255), default="")
|
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255))
is_vpn = Column(Boolean(), default=False)
ADD vpn_endpoint and vpn_key columns
# third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255), default="")
is_vpn = Column(Boolean(), default=False)
vpn_endpoint = Column(String(255), default="")
vpn_key = Column(String(255), default="")
|
<commit_before># third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255))
is_vpn = Column(Boolean(), default=False)
<commit_msg>ADD vpn_endpoint and vpn_key columns<commit_after># third party
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
# relative
from . import Base
class NodeRoute(Base):
__tablename__ = "node_route"
id = Column(Integer(), primary_key=True, autoincrement=True)
node_id = Column(Integer, ForeignKey("node.id"))
host_or_ip = Column(String(255), default="")
is_vpn = Column(Boolean(), default=False)
vpn_endpoint = Column(String(255), default="")
vpn_key = Column(String(255), default="")
|
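Purely for illustration, a self-contained sketch that mirrors the new vpn_endpoint and vpn_key columns outside the PySyft package (SQLAlchemy 1.4+ assumed); the Node class here is a hypothetical stand-in for the real parent table referenced by the foreign key.

from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Node(Base):  # hypothetical parent table so the foreign key can resolve
    __tablename__ = "node"
    id = Column(Integer, primary_key=True)

class NodeRoute(Base):  # mirrors the columns added in this commit
    __tablename__ = "node_route"
    id = Column(Integer(), primary_key=True, autoincrement=True)
    node_id = Column(Integer, ForeignKey("node.id"))
    host_or_ip = Column(String(255), default="")
    is_vpn = Column(Boolean(), default=False)
    vpn_endpoint = Column(String(255), default="")
    vpn_key = Column(String(255), default="")

engine = create_engine("sqlite://")        # in-memory database for the demo
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Node(id=1))
    session.add(NodeRoute(node_id=1, host_or_ip="10.0.0.2", is_vpn=True,
                          vpn_endpoint="100.64.0.1", vpn_key="wg-public-key"))
    session.commit()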
deaedcef36238d59484611a63ac21d60707004d4
|
Do_not_deploy/query_outgoing_queue.py
|
Do_not_deploy/query_outgoing_queue.py
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == ('no message', 404):
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
Remove all has to reflect changes to get next
|
Remove all has to reflect changes to get next
|
Python
|
mit
|
LandRegistry/register-publisher,LandRegistry/register-publisher
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
Remove all has to reflect changes to get next
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == ('no message', 404):
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
<commit_before>from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
<commit_msg>Remove all has to reflect changes to get next<commit_after>
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == ('no message', 404):
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
Remove all has to reflect changes to get next
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == ('no message', 404):
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
<commit_before>from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
<commit_msg>Remove all has to reflect changes to get next<commit_after>from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == ('no message', 404):
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
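The motivation for the new tuple comparison: calling the Flask view function directly returns a plain ('no message', 404) tuple rather than a bare string, so the sentinel check has to match the whole tuple. A kombu-free sketch of that behaviour:

def get_last_queue_message():
    # Stand-in for the real view: pretend the queue is empty.
    return "no message", 404

result = get_last_queue_message()
print(result)                          # ('no message', 404)
print(result == "no message")          # False - why the old check never matched
print(result == ("no message", 404))   # True  - the fixed comparison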
1f697a2c7bcf0f7769a9fc4f81be676ed5ee97c6
|
examples/flask/flask_seguro/cart.py
|
examples/flask/flask_seguro/cart.py
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
Fix dangerous default mutable value
|
Fix dangerous default mutable value
|
Python
|
mit
|
rgcarrasqueira/python-pagseguro,vintasoftware/python-pagseguro,rochacbruno/python-pagseguro
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
Fix dangerous default mutable value
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
<commit_before>from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
<commit_msg>Fix dangerous default mutable value<commit_after>
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
Fix dangerous default mutable value
from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
<commit_before>from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict={}):
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
<commit_msg>Fix dangerous default mutable value<commit_after>from flask_seguro.products import Products
from flask import current_app as app
class Cart:
def __init__(self, cart_dict=None):
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
return {"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount}
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_product = filter(
lambda x: x['id'] == product['id'], self.items)
self.items.remove(cart_product[0])
self.update()
return True
else:
return False
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
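For background on why the default argument changed, a short generic demonstration of the mutable-default pitfall (independent of the pagseguro code itself):

def broken(items=[]):    # the default list is created once and shared across calls
    items.append(1)
    return items

def fixed(items=None):   # the idiom applied to Cart above
    items = items or []
    items.append(1)
    return items

print(broken())  # [1]
print(broken())  # [1, 1]  <- state leaked from the first call
print(fixed())   # [1]
print(fixed())   # [1]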
45ee803cad9b16351a2d02c7ce9d39a36f8f2480
|
stutuz/__init__.py
|
stutuz/__init__.py
|
#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
|
#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
if 'locale' in request.args:
return request.args['locale']
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
|
Allow setting locale with a query parameter
|
Allow setting locale with a query parameter
|
Python
|
bsd-2-clause
|
dag/stutuz
|
#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
Allow setting locale with a query parameter
|
#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
if 'locale' in request.args:
return request.args['locale']
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
|
<commit_before>#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
<commit_msg>Allow setting locale with a query parameter<commit_after>
|
#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
if 'locale' in request.args:
return request.args['locale']
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
|
#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
Allow setting locale with a query parameter#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
if 'locale' in request.args:
return request.args['locale']
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
|
<commit_before>#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
<commit_msg>Allow setting locale with a query parameter<commit_after>#-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
from logbook import NestedSetup
from flask import Flask, request
from flaskext.babel import Babel, get_locale
from stutuz.extensions import genshi, db
from stutuz.converters import converters
from stutuz.modules import MOUNTS
def create_app(config=None):
app = Flask(__name__)
app.config.from_object('stutuz.configs')
if config is not None:
app.config.from_object(config)
app.config.from_envvar('STUTUZ_CONFIG', silent=True)
handlers = app.config.get('LOGBOOK_HANDLERS')
with NestedSetup(handlers):
for extension in genshi, db:
extension.init_app(app)
babel = Babel(app)
@babel.localeselector
def best_locale():
if 'locale' in request.args:
return request.args['locale']
return request.accept_languages.best_match(
map(str, babel.list_translations()))
@app.context_processor
def locale():
return dict(locale=get_locale())
for middleware in app.config.get('MIDDLEWARES', ()):
app.wsgi_app = middleware(app.wsgi_app)
app.url_map.converters.update(converters)
for url_prefix, module in MOUNTS:
app.register_module(module, url_prefix=url_prefix)
return app
|
ae8a91dbfb657ba2ac4f1ef9aa89c8b8ba25cde2
|
wsgi_intercept/requests_intercept.py
|
wsgi_intercept/requests_intercept.py
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
InterceptorMixin = WSGI_HTTPConnection
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(InterceptorMixin, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(InterceptorMixin, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
Fix the interceptor installation for HTTPSConnection.
|
Fix the interceptor installation for HTTPSConnection.
|
Python
|
mit
|
cdent/wsgi-intercept,sileht/python3-wsgi-intercept
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
InterceptorMixin = WSGI_HTTPConnection
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(InterceptorMixin, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(InterceptorMixin, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
Fix the interceptor installation for HTTPSConnection.
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
<commit_before>"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
InterceptorMixin = WSGI_HTTPConnection
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(InterceptorMixin, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(InterceptorMixin, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
<commit_msg>Fix the interceptor installation for HTTPSConnection.<commit_after>
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
InterceptorMixin = WSGI_HTTPConnection
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(InterceptorMixin, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(InterceptorMixin, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
Fix the interceptor installation for HTTPSConnection."""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
<commit_before>"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
InterceptorMixin = WSGI_HTTPConnection
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(InterceptorMixin, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(InterceptorMixin, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
<commit_msg>Fix the interceptor installation for HTTPSConnection.<commit_after>"""Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
8bfe6e791228ccbc3143f3a8747c68d2e8b0cbb5
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
Fix running tests on lower Django versions
|
Fix running tests on lower Django versions
|
Python
|
apache-2.0
|
AdrianLC/django-parler-rest,edoburu/django-parler-rest
|
#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
Fix running tests on lower Django versions
|
#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
<commit_before>#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
<commit_msg>Fix running tests on lower Django versions<commit_after>
|
#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
Fix running tests on lower Django versions#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
<commit_before>#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
<commit_msg>Fix running tests on lower Django versions<commit_after>#!/usr/bin/env python
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
b6836dd7bccd40eec146bc034cc8ac83b4e7f16a
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
|
#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Add Open edX common and LMS Django apps to PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform'))
for directory in ['common', 'lms']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
directory,
'djangoapps'))
for lib in ['xmodule', 'dogstats', 'capa', 'calc', 'chem']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
'common',
'lib',
lib))
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
|
Extend sys.path with required paths from edx-platform submodule
|
Extend sys.path with required paths from edx-platform submodule
|
Python
|
agpl-3.0
|
hastexo/edx-shopify,fghaas/edx-shopify
|
#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
Extend sys.path with required paths from edx-platform submodule
|
#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Add Open edX common and LMS Django apps to PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform'))
for directory in ['common', 'lms']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
directory,
'djangoapps'))
for lib in ['xmodule', 'dogstats', 'capa', 'calc', 'chem']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
'common',
'lib',
lib))
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
|
<commit_before>#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
<commit_msg>Extend sys.path with required paths from edx-platform submodule<commit_after>
|
#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Add Open edX common and LMS Django apps to PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform'))
for directory in ['common', 'lms']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
directory,
'djangoapps'))
for lib in ['xmodule', 'dogstats', 'capa', 'calc', 'chem']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
'common',
'lib',
lib))
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
|
#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
Extend sys.path with required paths from edx-platform submodule#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Add Open edX common and LMS Django apps to PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform'))
for directory in ['common', 'lms']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
directory,
'djangoapps'))
for lib in ['xmodule', 'dogstats', 'capa', 'calc', 'chem']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
'common',
'lib',
lib))
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
|
<commit_before>#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
<commit_msg>Extend sys.path with required paths from edx-platform submodule<commit_after>#!/usr/bin/env python
import sys
import os
from coverage import coverage
from optparse import OptionParser
# This envar must be set before importing NoseTestSuiteRunner,
# silence flake8 E402 ("module level import not at top of file").
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django_nose import NoseTestSuiteRunner # noqa: E402
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Add Open edX common and LMS Django apps to PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform'))
for directory in ['common', 'lms']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
directory,
'djangoapps'))
for lib in ['xmodule', 'dogstats', 'capa', 'calc', 'chem']:
sys.path.append(os.path.join(os.path.dirname(__file__),
'edx-platform',
'common',
'lib',
lib))
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
c = coverage(source=['edx_shopify'], omit=['*migrations*', '*tests*'],
auto_data=True)
c.start()
num_failures = test_runner.run_tests(test_args)
c.stop()
if num_failures > 0:
sys.exit(num_failures)
if __name__ == '__main__':
parser = OptionParser()
(options, args) = parser.parse_args()
run_tests(*args)
|
c69ea05755ecdc6fc0c05e39e5746445376d163a
|
provision/setup.py
|
provision/setup.py
|
from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.6',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
|
from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.7',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
|
Update acc-provision version to 1.9.7
|
Update acc-provision version to 1.9.7
|
Python
|
apache-2.0
|
noironetworks/aci-containers,noironetworks/aci-containers
|
from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.6',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
Update acc-provision version to 1.9.7
|
from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.7',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.6',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
<commit_msg>Update acc-provision version to 1.9.7<commit_after>
|
from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.7',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
|
from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.6',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
Update acc-provision version to 1.9.7from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.7',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.6',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
<commit_msg>Update acc-provision version to 1.9.7<commit_after>from setuptools import setup, find_packages
setup(
name='acc_provision',
version='1.9.7',
description='Tool to provision ACI for ACI Containers Controller',
author="Cisco Systems, Inc.",
author_email="[email protected]",
url='http://github.com/noironetworks/aci-containers/',
license="http://www.apache.org/licenses/LICENSE-2.0",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'acc-provision=acc_provision.acc_provision:main',
]
},
install_requires=[
'requests',
'pyyaml',
'jinja2',
'pyopenssl',
],
)
|
bde09206bf308167a11bcb012753d10d845dc810
|
test_project/blog/models.py
|
test_project/blog/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
|
from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class SmartTag(models.Model):
entry = models.ForeignKey(Entry, related_name='smart_tags')
name = models.CharField(max_length=32)
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
|
Create SmartTag model to demonstrate multi-word resource names.
|
Create SmartTag model to demonstrate multi-word resource names.
|
Python
|
bsd-3-clause
|
juanique/django-chocolate,juanique/django-chocolate,juanique/django-chocolate
|
from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
Create SmartTag model to demonstrate multi-word resource names.
|
from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class SmartTag(models.Model):
entry = models.ForeignKey(Entry, related_name='smart_tags')
name = models.CharField(max_length=32)
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
<commit_msg>Create SmartTag model to demonstrate multi-word resource names.<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class SmartTag(models.Model):
entry = models.ForeignKey(Entry, related_name='smart_tags')
name = models.CharField(max_length=32)
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
|
from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
Create SmartTag model to demonstrate multi-word resource names.from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class SmartTag(models.Model):
entry = models.ForeignKey(Entry, related_name='smart_tags')
name = models.CharField(max_length=32)
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
<commit_msg>Create SmartTag model to demonstrate multi-word resource names.<commit_after>from django.db import models
from django.contrib.auth.models import User
class Entry(models.Model):
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class Comment(models.Model):
post = models.ForeignKey(Entry, related_name='comments')
content = models.TextField()
author = models.ForeignKey(User)
created = models.DateTimeField()
class SmartTag(models.Model):
entry = models.ForeignKey(Entry, related_name='smart_tags')
name = models.CharField(max_length=32)
class Actor(models.Model):
name = models.CharField(max_length=32)
class Movie(models.Model):
name = models.CharField(max_length=32)
actors = models.ManyToManyField(Actor, related_name='movies')
score = models.IntegerField(default=0)
|
f35163ad752a52983d7d5ff9bfd383e98db06f0b
|
tests/test_pycookiecheat.py
|
tests/test_pycookiecheat.py
|
# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail unless you've visited my site in Chrome."""
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
|
# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
import os
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
if os.getenv('TRAVIS', False) == 'true':
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
else:
assert True
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail
unless you've visited my site in Chrome."""
if os.getenv('TRAVIS', False) == 'true':
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
else:
assert True
|
Test for travis-CI and skip tests accordingly.
|
Test for travis-CI and skip tests accordingly.
|
Python
|
mit
|
fxxkhand/pycookiecheat,n8henrie/pycookiecheat
|
# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail unless you've visited my site in Chrome."""
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
Test for travis-CI and skip tests accordingly.
|
# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
import os
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
if os.getenv('TRAVIS', False) == 'true':
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
else:
assert True
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail
unless you've visited my site in Chrome."""
if os.getenv('TRAVIS', False) == 'true':
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
else:
assert True
|
<commit_before># -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail unless you've visited my site in Chrome."""
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
<commit_msg>Test for travis-CI and skip tests accordingly.<commit_after>
|
# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
import os
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
if os.getenv('TRAVIS', False) == 'true':
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
else:
assert True
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail
unless you've visited my site in Chrome."""
if os.getenv('TRAVIS', False) == 'true':
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
else:
assert True
|
# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail unless you've visited my site in Chrome."""
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
Test for travis-CI and skip tests accordingly.# -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
import os
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
if os.getenv('TRAVIS', False) == 'true':
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
else:
assert True
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail
unless you've visited my site in Chrome."""
if os.getenv('TRAVIS', False) == 'true':
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
else:
assert True
|
<commit_before># -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail unless you've visited my site in Chrome."""
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
<commit_msg>Test for travis-CI and skip tests accordingly.<commit_after># -*- coding: utf-8 -*-
"""
test_pycookiecheat
----------------------------------
Tests for `pycookiecheat` module.
"""
from pycookiecheat import chrome_cookies
from uuid import uuid4
import pytest
import os
def test_raises_on_empty():
with pytest.raises(TypeError):
broken = chrome_cookies()
def test_no_cookies():
if os.getenv('TRAVIS', False) == 'true':
never_been_here = 'http://{}.com'.format(uuid4())
empty_dict = chrome_cookies(never_been_here)
assert empty_dict == dict()
else:
assert True
def test_n8henrie_com():
"""Tests a wordpress cookie that I think should be set. NB: Will fail
unless you've visited my site in Chrome."""
if os.getenv('TRAVIS', False) == 'true':
cookies = chrome_cookies('http://n8henrie.com')
assert cookies['wordpress_test_cookie'] == 'WP+Cookie+check'
else:
assert True
|
a1f118f2d4068d0aeffc1b02efcd7337de6ffab1
|
tests/run_tests.py
|
tests/run_tests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
import logging
logging.disable(logging.DEBUG) # Disable debug logging when running the test suite.
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
|
Disable debug logging when running the test suite
|
Disable debug logging when running the test suite
|
Python
|
mit
|
inonit/django-chemtrails,inonit/django-chemtrails,inonit/django-chemtrails
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
Disable debug logging when running the test suite
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
import logging
logging.disable(logging.DEBUG) # Disable debug logging when running the test suite.
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
<commit_msg>Disable debug logging when running the test suite<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
import logging
logging.disable(logging.DEBUG) # Disable debug logging when running the test suite.
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
Disable debug logging when running the test suite#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
import logging
logging.disable(logging.DEBUG) # Disable debug logging when running the test suite.
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
<commit_msg>Disable debug logging when running the test suite<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import nose
import logging
logging.disable(logging.DEBUG) # Disable debug logging when running the test suite.
def start(argv=None):
sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n')
if argv is None:
argv = [
'nosetests',
'--verbose',
'--with-coverage',
'--cover-html', '--cover-html-dir=.htmlcov',
'--cover-erase',
'--cover-branches',
'--cover-package=chemtrails',
]
nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__)))
if __name__ == '__main__':
start(sys.argv)
|
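The chemtrails record above hinges on a single call, logging.disable(logging.DEBUG). A minimal standard-library sketch of what that call does (the logger name "demo" is illustrative): records at DEBUG and below are dropped, while INFO and above still pass.

import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
log = logging.getLogger("demo")

log.debug("debug before disable")    # emitted
logging.disable(logging.DEBUG)       # drop DEBUG and anything less severe
log.debug("debug after disable")     # suppressed
log.info("info still gets through")  # emitted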
5b282d9322a676b4185fcd253f338a342ec5e5ce
|
.config/i3/py3status/playerctlbar.py
|
.config/i3/py3status/playerctlbar.py
|
# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
|
# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(
cmdlist,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
|
Fix stderr from playerctl bar
|
Fix stderr from playerctl bar
|
Python
|
unlicense
|
louisswarren/dotfiles,louisswarren/dotfiles
|
# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
Fix stderr from playerctl bar
|
# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(
cmdlist,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
|
<commit_before># py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
<commit_msg>Fix stderr from playerctl bar<commit_after>
|
# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(
cmdlist,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
|
# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
Fix stderr from playerctl bar# py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(
cmdlist,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
|
<commit_before># py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
<commit_msg>Fix stderr from playerctl bar<commit_after># py3status module for playerctl
import subprocess
def run(*cmdlist):
return subprocess.run(
cmdlist,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL).stdout.decode()
def player_args(players):
if not players:
return 'playerctl',
else:
return 'playerctl', '-p', players
def get_status(players):
status = run(*player_args(players), 'status')[:-1]
if status in ('Playing', 'Paused'):
return status
return ''
def get_info(players, fmt):
args = 'metadata', '--format', f'{fmt}'
return run(*player_args(players), *args).strip()
class Py3status:
players = ''
format = '{{ artist }} / {{ title }}'
def spotbar(self):
text_format = "[[ {info} ]]|[ {status} ]"
params = {'status': get_status(self.players)}
if params['status'] == 'Playing':
params['info'] = get_info(self.players, self.format)
if params['info'] == '/ -':
params['info'] = None
return {
'full_text': self.py3.safe_format(text_format, params),
'cached_until': self.py3.time_in(seconds=1)
}
def on_click(self, event):
if event['button'] == 1:
run('playerctl', 'play-pause')
if __name__ == '__main__':
from py3status.module_test import module_test
module_test(Py3status)
|
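The playerctl record swaps in stderr=subprocess.DEVNULL so a missing player cannot spill errors onto the bar. A self-contained sketch of that pattern (the run_quiet helper name is illustrative): the child writes to both streams, but only stdout is captured and stderr is discarded.

import subprocess
import sys

def run_quiet(*cmd):
    # Capture stdout, silently discard stderr.
    return subprocess.run(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    ).stdout.decode()

out = run_quiet(
    sys.executable, "-c",
    "import sys; print('kept'); print('dropped', file=sys.stderr)",
)
print(out)  # prints 'kept'; the stderr line never reaches the terminal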
7527ce1b48f769d33eb5ede3d54413e51eb2ac12
|
senkumba/models.py
|
senkumba/models.py
|
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
|
from django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
Change titles for the site
|
Change titles for the site
|
Python
|
mit
|
lubegamark/senkumba
|
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_strChange titles for the site
|
from django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
<commit_before>from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str<commit_msg>Change titles for the site<commit_after>
|
from django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_strChange titles for the sitefrom django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
<commit_before>from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str<commit_msg>Change titles for the site<commit_after>from django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
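The senkumba record replaces User.__str__ at import time. A pure-Python stand-in shows the same monkey-patching technique; the stub User class below is invented for the sketch, not Django's auth model.

class User:
    # Minimal stand-in for django.contrib.auth.models.User.
    def __init__(self, username, full_name=""):
        self.username = username
        self._full_name = full_name

    def get_full_name(self):
        return self._full_name

def user_new_str(self):
    # Fall back to the username when no full name is set.
    return self.username if self.get_full_name() == "" else self.get_full_name()

User.__str__ = user_new_str

print(User("jdoe"))              # jdoe
print(User("jdoe", "Jane Doe"))  # Jane Doe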
d3a203725d13a7abef091f0070f90826d3225dbc
|
settings_travis.py
|
settings_travis.py
|
import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
|
import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
LDAP_TLS_VERSION = ssl.PROTOCOL_TLSv1
|
Fix travis unit test for python 3.3
|
Fix travis unit test for python 3.3
|
Python
|
bsd-2-clause
|
rroemhild/flask-ldapconn
|
import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
Fix travis unit test for python 3.3
|
import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
LDAP_TLS_VERSION = ssl.PROTOCOL_TLSv1
|
<commit_before>import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
<commit_msg>Fix travis unit test for python 3.3<commit_after>
|
import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
LDAP_TLS_VERSION = ssl.PROTOCOL_TLSv1
|
import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
Fix travis unit test for python 3.3import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
LDAP_TLS_VERSION = ssl.PROTOCOL_TLSv1
|
<commit_before>import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
<commit_msg>Fix travis unit test for python 3.3<commit_after>import ssl
LDAP_SERVER = 'ldap.rserver.de'
LDAP_PORT = 3389
LDAP_SSL_PORT = 6636
LDAP_REQUIRE_CERT = ssl.CERT_NONE
LDAP_TLS_VERSION = ssl.PROTOCOL_TLSv1
|
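The settings_travis record relaxes certificate checking and pins the TLS version for CI. A hedged sketch of roughly what ssl.CERT_NONE means in terms of the standard ssl module; this builds a plain SSLContext for illustration, not flask-ldapconn's own TLS configuration object.

import ssl

ctx = ssl.create_default_context()
ctx.check_hostname = False        # must be cleared before dropping verification
ctx.verify_mode = ssl.CERT_NONE   # the behaviour LDAP_REQUIRE_CERT = ssl.CERT_NONE asks for
print(ctx.verify_mode)            # VerifyMode.CERT_NONE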
c84e22824cd5546406656ecc06a7dcd37a013954
|
shopit_app/urls.py
|
shopit_app/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import authentication_app.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gettingstarted.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', authentication_app.views.index, name='index'),
url(r'^db', authentication_app.views.db, name='db'),
url(r'^admin/', include(admin.site.urls)),
)
|
from rest_frmaework_nested import routers
from authentication_app.views import AccountViewSet
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# APIendpoints
url(r'^api/v1/', include(router.urls)),
url('^.*$', IndexView.as_view(), name='index'),
)
|
Add the API endpoint url for the account view set.
|
Add the API endpoint url for the account view set.
|
Python
|
mit
|
mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import authentication_app.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gettingstarted.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', authentication_app.views.index, name='index'),
url(r'^db', authentication_app.views.db, name='db'),
url(r'^admin/', include(admin.site.urls)),
)
Add the API endpoint url for the account view set.
|
from rest_frmaework_nested import routers
from authentication_app.views import AccountViewSet
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# APIendpoints
url(r'^api/v1/', include(router.urls)),
url('^.*$', IndexView.as_view(), name='index'),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import authentication_app.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gettingstarted.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', authentication_app.views.index, name='index'),
url(r'^db', authentication_app.views.db, name='db'),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add the API endpoint url for the account view set.<commit_after>
|
from rest_frmaework_nested import routers
from authentication_app.views import AccountViewSet
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# APIendpoints
url(r'^api/v1/', include(router.urls)),
url('^.*$', IndexView.as_view(), name='index'),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import authentication_app.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gettingstarted.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', authentication_app.views.index, name='index'),
url(r'^db', authentication_app.views.db, name='db'),
url(r'^admin/', include(admin.site.urls)),
)
Add the API endpoint url for the account view set.from rest_frmaework_nested import routers
from authentication_app.views import AccountViewSet
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# APIendpoints
url(r'^api/v1/', include(router.urls)),
url('^.*$', IndexView.as_view(), name='index'),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import authentication_app.views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'gettingstarted.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', authentication_app.views.index, name='index'),
url(r'^db', authentication_app.views.db, name='db'),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add the API endpoint url for the account view set.<commit_after>from rest_frmaework_nested import routers
from authentication_app.views import AccountViewSet
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# APIendpoints
url(r'^api/v1/', include(router.urls)),
url('^.*$', IndexView.as_view(), name='index'),
)
|
f896d0fa40250a580fee584217c5a4c1d39d7388
|
snipper/snippet.py
|
snipper/snippet.py
|
import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
|
import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
""" Return files of snippet """
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
|
Add doc string to Snippet.get_files
|
Add doc string to Snippet.get_files
|
Python
|
mit
|
mesuutt/snipper
|
import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
Add doc string to Snippet.get_files
|
import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
""" Return files of snippet """
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
|
<commit_before>import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
<commit_msg>Add doc string to Snippet.get_files<commit_after>
|
import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
""" Return files of snippet """
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
|
import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
Add doc string to Snippet.get_filesimport os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
""" Return files of snippet """
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
|
<commit_before>import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
<commit_msg>Add doc string to Snippet.get_files<commit_after>import os
from os import path
import glob
import json
import subprocess
class Snippet(object):
def __init__(self, config, username, snippet_id):
self.config = config
self.username = username
self.snippet_id = snippet_id
repo_parent = path.join(self.config.get('snippet_home'), username)
self.repo_path = glob.glob(path.join(repo_parent, '*{}'.format(self.snippet_id)))[0]
@staticmethod
def clone(url, clone_to):
#TODO: Add log line for notifying user.
# subprocess.DEVNULL
subprocess.call(['git', 'clone', url, clone_to])
@staticmethod
def pull(repo_dir):
# TODO: Add log line for notifying user.
subprocess.call(['git', '--git-dir={}/.git'.format(repo_dir), 'pull'])
def get_files(self):
""" Return files of snippet """
metadata_file = path.join(self.config.get('snippet_home'), 'metadata.json')
with open(metadata_file, 'r') as f:
data = json.loads(f.read())
for item in data['values']:
if item['id'] != self.snippet_id:
continue
return [f for f in os.listdir(self.repo_path) if path.isfile(path.join(self.repo_path, f))]
|
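The snipper record's __init__ locates a snippet directory by globbing for any name that ends with the snippet id. A self-contained sketch of that lookup using a temporary directory; the directory and id names below are illustrative.

import glob
import os
import tempfile
from os import path

base = tempfile.mkdtemp()
snippet_id = "abc123"
repo = path.join(base, "my-snippet-abc123")
os.makedirs(repo)

# Same pattern as Snippet.__init__: match any entry whose name ends with the id.
matches = glob.glob(path.join(base, "*{}".format(snippet_id)))
print(matches[0] == repo)  # True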
9d2124e81e62ab508de197aac4f29193ef15d4d2
|
requirejs/utils.py
|
requirejs/utils.py
|
from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return app_template_dirs
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
|
from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return list(app_template_dirs)
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
|
Fix list concatenation error in Django 1.6
|
Fix list concatenation error in Django 1.6
`app_template_dirs` returns a Tuple which cannot be concatenated with
List.
|
Python
|
mit
|
bpeschier/django-compressor-requirejs,bpeschier/django-compressor-requirejs
|
from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return app_template_dirs
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
Fix list concatenation error in Django 1.6
`app_template_dirs` returns a Tuple which cannot be concatenated with
List.
|
from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return list(app_template_dirs)
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
|
<commit_before>from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return app_template_dirs
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
<commit_msg>Fix list concatenation error in Django 1.6
`app_template_dirs` returns a Tuple which cannot be concatenated with
List.<commit_after>
|
from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return list(app_template_dirs)
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
|
from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return app_template_dirs
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
Fix list concatenation error in Django 1.6
`app_template_dirs` returns a Tuple which cannot be concatenated with
List.from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return list(app_template_dirs)
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
|
<commit_before>from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return app_template_dirs
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
<commit_msg>Fix list concatenation error in Django 1.6
`app_template_dirs` returns a Tuple which cannot be concatenated with
List.<commit_after>from django.conf import settings
import django
def get_app_template_dirs():
if django.VERSION < (1, 8):
# noinspection PyUnresolvedReferences
from django.template.loaders.app_directories import app_template_dirs
else: # Django 1.8's template loader is refactored
# noinspection PyUnresolvedReferences
from django.template.utils import get_app_template_dirs
app_template_dirs = get_app_template_dirs('templates')
return list(app_template_dirs)
def is_app_installed(label):
"""
Check if app is installed into the Django app cache.
"""
if django.VERSION >= (1, 7):
from django.apps import apps
return apps.is_installed(label)
else:
return label in settings.INSTALLED_APPS
def get_installed_app_labels():
if django.VERSION >= (1, 7):
from django.apps import apps
return [app.label for app in apps.get_app_configs()]
else:
return [app.split('.')[-1] for app in settings.INSTALLED_APPS]
|
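The requirejs record's message comes down to a tuple/list mismatch: on Django < 1.8 app_template_dirs is a tuple, and concatenating it with a list raises TypeError. A short demonstration with stand-in paths shows both the failure and the fix of normalising to a list.

app_template_dirs = ("/proj/app1/templates", "/proj/app2/templates")  # tuple, as on Django < 1.8
extra_dirs = ["/proj/templates"]

try:
    app_template_dirs + extra_dirs
except TypeError as exc:
    print(exc)  # can only concatenate tuple (not "list") to tuple

# Normalising to a list, as the fixed get_app_template_dirs() does, makes it concatenable.
print(list(app_template_dirs) + extra_dirs)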
bc15058cc95916788250d660d5560b69a82e0b89
|
warehouse/__main__.py
|
warehouse/__main__.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from warehouse import script
def main():
script.run()
if __name__ == "__main__":
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from flask.ext.script import InvalidCommand # pylint: disable=E0611,F0401
from warehouse import script
def main():
# This is copied over from script.run and modified for Warehouse
try:
try:
command = sys.argv[1]
except IndexError:
raise InvalidCommand("Please provide a command:")
return script.handle("warehouse", command, sys.argv[2:])
except InvalidCommand as exc:
print exc
script.print_usage()
return 1
if __name__ == "__main__":
sys.exit(main())
|
Customize the command runner for cleaner output
|
Customize the command runner for cleaner output
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from warehouse import script
def main():
script.run()
if __name__ == "__main__":
main()
Customize the command runner for cleaner output
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from flask.ext.script import InvalidCommand # pylint: disable=E0611,F0401
from warehouse import script
def main():
# This is copied over from script.run and modified for Warehouse
try:
try:
command = sys.argv[1]
except IndexError:
raise InvalidCommand("Please provide a command:")
return script.handle("warehouse", command, sys.argv[2:])
except InvalidCommand as exc:
print exc
script.print_usage()
return 1
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from warehouse import script
def main():
script.run()
if __name__ == "__main__":
main()
<commit_msg>Customize the command runner for cleaner output<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from flask.ext.script import InvalidCommand # pylint: disable=E0611,F0401
from warehouse import script
def main():
# This is copied over from script.run and modified for Warehouse
try:
try:
command = sys.argv[1]
except IndexError:
raise InvalidCommand("Please provide a command:")
return script.handle("warehouse", command, sys.argv[2:])
except InvalidCommand as exc:
print exc
script.print_usage()
return 1
if __name__ == "__main__":
sys.exit(main())
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from warehouse import script
def main():
script.run()
if __name__ == "__main__":
main()
Customize the command runner for cleaner outputfrom __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from flask.ext.script import InvalidCommand # pylint: disable=E0611,F0401
from warehouse import script
def main():
# This is copied over from script.run and modified for Warehouse
try:
try:
command = sys.argv[1]
except IndexError:
raise InvalidCommand("Please provide a command:")
return script.handle("warehouse", command, sys.argv[2:])
except InvalidCommand as exc:
print exc
script.print_usage()
return 1
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from warehouse import script
def main():
script.run()
if __name__ == "__main__":
main()
<commit_msg>Customize the command runner for cleaner output<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from flask.ext.script import InvalidCommand # pylint: disable=E0611,F0401
from warehouse import script
def main():
# This is copied over from script.run and modified for Warehouse
try:
try:
command = sys.argv[1]
except IndexError:
raise InvalidCommand("Please provide a command:")
return script.handle("warehouse", command, sys.argv[2:])
except InvalidCommand as exc:
print exc
script.print_usage()
return 1
if __name__ == "__main__":
sys.exit(main())
|
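The warehouse record targets Python 2 (note the print statement) and hand-rolls flask-script's dispatch so the process exits non-zero on a bad command. A Python 3 stand-in of that control flow; the InvalidCommand class here is a local stub, not flask-script's real exception, and the dispatch is only printed.

import sys

class InvalidCommand(Exception):
    """Local stub standing in for flask_script.InvalidCommand."""

def main(argv):
    try:
        try:
            command = argv[1]
        except IndexError:
            raise InvalidCommand("Please provide a command:")
        print("would dispatch:", command, argv[2:])
        return 0
    except InvalidCommand as exc:
        print(exc)
        return 1

if __name__ == "__main__":
    sys.exit(main(sys.argv))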
0f1f7963c2ea80604593644e1c04643031561970
|
app/timetables/migrations/0004_course.py
|
app/timetables/migrations/0004_course.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.ForceCapitalizeMixin, models.Model),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.SlugifyMixin, models.Model),
),
]
|
Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin
|
Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin
|
Python
|
mit
|
teamtaverna/core
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.ForceCapitalizeMixin, models.Model),
),
]
Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.SlugifyMixin, models.Model),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.ForceCapitalizeMixin, models.Model),
),
]
<commit_msg>Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.SlugifyMixin, models.Model),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.ForceCapitalizeMixin, models.Model),
),
]
Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.SlugifyMixin, models.Model),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.ForceCapitalizeMixin, models.Model),
),
]
<commit_msg>Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.SlugifyMixin, models.Model),
),
]
|
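The timetables record only changes which mixin appears in the migration's bases tuple. A pure-Python illustration of why the first base matters for method resolution; FakeModel and the slug logic are invented for the sketch, not the project's real SlugifyMixin or Django's model machinery.

class SlugifyMixin:
    def save(self):
        # Derive a slug before delegating to the next class in the MRO.
        self.slug = self.name.lower().replace(" ", "-")
        return super().save()

class FakeModel:
    def save(self):
        return "saved"

# Same shape as the migration's bases=(common.mixins.SlugifyMixin, models.Model).
Course = type("Course", (SlugifyMixin, FakeModel), {})

course = Course()
course.name = "Intro Course"
print(course.save(), course.slug)  # saved intro-course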
a57f7c43bc7749de5acd42b6db95d77074308cef
|
scaper/__init__.py
|
scaper/__init__.py
|
#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
__version__ = '0.1.0'
|
#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
import jams
from pkg_resources import resource_filename
__version__ = '0.1.0'
# Add sound_event namesapce
namespace_file = resource_filename(__name__, 'namespaces/sound_event.json')
jams.schema.add_namespace(namespace_file)
|
Add sound_event namespace to jams during init
|
Add sound_event namespace to jams during init
|
Python
|
bsd-3-clause
|
justinsalamon/scaper
|
#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
__version__ = '0.1.0'
Add sound_event namespace to jams during init
|
#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
import jams
from pkg_resources import resource_filename
__version__ = '0.1.0'
# Add sound_event namesapce
namespace_file = resource_filename(__name__, 'namespaces/sound_event.json')
jams.schema.add_namespace(namespace_file)
|
<commit_before>#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
__version__ = '0.1.0'
<commit_msg>Add sound_event namespace to jams during init<commit_after>
|
#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
import jams
from pkg_resources import resource_filename
__version__ = '0.1.0'
# Add sound_event namesapce
namespace_file = resource_filename(__name__, 'namespaces/sound_event.json')
jams.schema.add_namespace(namespace_file)
|
#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
__version__ = '0.1.0'
Add sound_event namespace to jams during init#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
import jams
from pkg_resources import resource_filename
__version__ = '0.1.0'
# Add sound_event namesapce
namespace_file = resource_filename(__name__, 'namespaces/sound_event.json')
jams.schema.add_namespace(namespace_file)
|
<commit_before>#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
__version__ = '0.1.0'
<commit_msg>Add sound_event namespace to jams during init<commit_after>#!/usr/bin/env python
"""Top-level module for scaper"""
from .core import *
import jams
from pkg_resources import resource_filename
__version__ = '0.1.0'
# Add sound_event namesapce
namespace_file = resource_filename(__name__, 'namespaces/sound_event.json')
jams.schema.add_namespace(namespace_file)
|
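The scaper record resolves a JSON file shipped inside the package with pkg_resources.resource_filename and registers it via jams.schema.add_namespace. A minimal sketch of the path-resolution half only, pointed at a standard-library package purely so it runs without scaper or jams installed.

from pkg_resources import resource_filename

# Resolve a file relative to an importable package; 'email' is used here only so the
# example runs anywhere, in place of resource_filename(__name__, 'namespaces/sound_event.json').
print(resource_filename("email", "__init__.py"))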
4c50bd3088451a8d0c81d651f287c1e4652aea8d
|
app/gdn/manage.py
|
app/gdn/manage.py
|
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()
|
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
@manager.command
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()
|
Fix debug server not working
|
Fix debug server not working
|
Python
|
mpl-2.0
|
MCProHosting/SpaceGDN,MCProHosting/SpaceGDN,XereoNet/SpaceGDN,MCProHosting/SpaceGDN,XereoNet/SpaceGDN,XereoNet/SpaceGDN
|
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()Fix debug server not working
|
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
@manager.command
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()
|
<commit_before>from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()<commit_msg>Fix debug server not working<commit_after>
|
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
@manager.command
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()
|
from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()Fix debug server not workingfrom flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
@manager.command
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()
|
<commit_before>from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()<commit_msg>Fix debug server not working<commit_after>from flask.ext.script import Manager
from flask.ext.migrate import MigrateCommand
from models import *
from . import app
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def run():
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(app.config['HTTP_PORT'])
IOLoop.instance().start()
@manager.command
def debug():
app.run(debug = app.config['DEBUG'], host=app.config['HTTP_HOST'], port=app.config['HTTP_PORT'])
@manager.command
def load():
from loader import loader
loader.load()
|
b62c8c905cdd332a0073ce462be3e5c5b17b282d
|
api/webview/views.py
|
api/webview/views.py
|
from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListCreateAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListCreateAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
|
from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
|
Make the view List only remove Create
|
Make the view List only remove Create
|
Python
|
apache-2.0
|
erinspace/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,felliott/scrapi
|
from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListCreateAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListCreateAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
Make the view List only remove Create
|
from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
|
<commit_before>from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListCreateAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListCreateAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
<commit_msg>Make the view List only remove Create<commit_after>
|
from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
|
from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListCreateAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListCreateAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
Make the view List only remove Createfrom rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
|
<commit_before>from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListCreateAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListCreateAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
<commit_msg>Make the view List only remove Create<commit_after>from rest_framework import generics
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.decorators import api_view
from django.views.decorators.clickjacking import xframe_options_exempt
from api.webview.models import Document
from api.webview.serializers import DocumentSerializer
class DocumentList(generics.ListAPIView):
"""
List all documents in the SHARE API
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return all documents
"""
return Document.objects.all()
class DocumentsFromSource(generics.ListAPIView):
"""
List all documents from a particular source
"""
serializer_class = DocumentSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def perform_create(self, serializer):
serializer.save(source=self.request.user)
def get_queryset(self):
""" Return queryset based on source
"""
return Document.objects.filter(source=self.kwargs['source'])
@api_view(['GET'])
@xframe_options_exempt
def document_detail(request, source, docID):
"""
Retrieve one particular document.
"""
try:
all_sources = Document.objects.filter(source=source)
document = all_sources.get(docID=docID)
except Document.DoesNotExist:
return Response(status=404)
serializer = DocumentSerializer(document)
return Response(serializer.data)
|
067b557258a85945635a880ced65454cfa2b61af
|
supermega/tests/test_session.py
|
supermega/tests/test_session.py
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
Add test for key derivation
|
Add test for key derivation
|
Python
|
bsd-3-clause
|
lmb/Supermega
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()Add test for key derivation
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
<commit_before>import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()<commit_msg>Add test for key derivation<commit_after>
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()Add test for key derivationimport unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
<commit_before>import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()<commit_msg>Add test for key derivation<commit_after>import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
bbfe056602075a46b231dc28ddcada7f525ce927
|
conftest.py
|
conftest.py
|
import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
request.addfinalizer(wtm._unpatch_settings)
return django_webtest.DjangoTestApp()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
|
import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.yield_fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
yield django_webtest.DjangoTestApp()
wtm._unpatch_settings()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
|
Use yield_fixture for app fixture
|
Use yield_fixture for app fixture
|
Python
|
agpl-3.0
|
ideascube/ideascube,Lcaracol/ideasbox.lan,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,Lcaracol/ideasbox.lan,Lcaracol/ideasbox.lan
|
import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
request.addfinalizer(wtm._unpatch_settings)
return django_webtest.DjangoTestApp()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
Use yield_fixture for app fixture
|
import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.yield_fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
yield django_webtest.DjangoTestApp()
wtm._unpatch_settings()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
|
<commit_before>import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
request.addfinalizer(wtm._unpatch_settings)
return django_webtest.DjangoTestApp()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
<commit_msg>Use yield_fixture for app fixture<commit_after>
|
import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.yield_fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
yield django_webtest.DjangoTestApp()
wtm._unpatch_settings()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
|
import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
request.addfinalizer(wtm._unpatch_settings)
return django_webtest.DjangoTestApp()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
Use yield_fixture for app fixtureimport pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.yield_fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
yield django_webtest.DjangoTestApp()
wtm._unpatch_settings()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
|
<commit_before>import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
request.addfinalizer(wtm._unpatch_settings)
return django_webtest.DjangoTestApp()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
<commit_msg>Use yield_fixture for app fixture<commit_after>import pytest
import django_webtest
from django.core.urlresolvers import reverse
from ideasbox.tests.factories import UserFactory
@pytest.fixture()
def user():
return UserFactory(short_name="Hello", password='password')
@pytest.fixture()
def staffuser():
return UserFactory(short_name="Hello", password='password', is_staff=True)
@pytest.yield_fixture()
def app(request):
wtm = django_webtest.WebTestMixin()
wtm._patch_settings()
yield django_webtest.DjangoTestApp()
wtm._unpatch_settings()
@pytest.fixture()
def loggedapp(app, user):
"""Return an app with an already logged in user."""
form = app.get(reverse('login')).forms['login']
form['username'] = user.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
@pytest.fixture()
def staffapp(app, staffuser):
"""Return an app with an already logged in staff user."""
form = app.get(reverse('login')).forms['login']
form['username'] = staffuser.serial
form['password'] = 'password'
form.submit().follow()
setattr(app, 'user', user) # for later use, if needed
return app
|
0a5f09c90ace9c09379b8f2faa98ba7040298af9
|
QuantifiedDevOpenDashboardCommand.py
|
QuantifiedDevOpenDashboardCommand.py
|
import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000/"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)sdashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
Fix url to be consistent.
|
Fix url to be consistent.
|
Python
|
apache-2.0
|
1self/sublime-text-plugin,1self/sublime-text-plugin,1self/sublime-text-plugin
|
import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000/"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)sdashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)Fix url to be consistent.
|
import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
<commit_before>import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000/"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)sdashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)<commit_msg>Fix url to be consistent.<commit_after>
|
import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000/"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)sdashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)Fix url to be consistent.import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
<commit_before>import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000/"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)sdashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)<commit_msg>Fix url to be consistent.<commit_after>import sublime, sublime_plugin, webbrowser
QD_URL = "http://localhost:5000"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
3509243e467a8546a3fa9ba123f77a1a96643402
|
xml_json_import/__init__.py
|
xml_json_import/__init__.py
|
from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
|
from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
if not hasattr(settings, 'XSLT_FILES_DIR'):
raise XmlJsonImportModuleException('Settings must contain XSLT_FILES_DIR parameter')
|
Throw exception for not existing XSLT_FILES_DIR setting
|
Throw exception for not existing XSLT_FILES_DIR setting
|
Python
|
mit
|
lev-veshnyakov/django-import-data,lev-veshnyakov/django-import-data
|
from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
Throw exception for not existing XSLT_FILES_DIR setting
|
from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
if not hasattr(settings, 'XSLT_FILES_DIR'):
raise XmlJsonImportModuleException('Settings must contain XSLT_FILES_DIR parameter')
|
<commit_before>from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
<commit_msg>Throw exception for not existing XSLT_FILES_DIR setting<commit_after>
|
from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
if not hasattr(settings, 'XSLT_FILES_DIR'):
raise XmlJsonImportModuleException('Settings must contain XSLT_FILES_DIR parameter')
|
from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
Throw exception for not existing XSLT_FILES_DIR settingfrom django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
if not hasattr(settings, 'XSLT_FILES_DIR'):
raise XmlJsonImportModuleException('Settings must contain XSLT_FILES_DIR parameter')
|
<commit_before>from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
<commit_msg>Throw exception for not existing XSLT_FILES_DIR setting<commit_after>from django.conf import settings
class XmlJsonImportModuleException(Exception):
pass
if not hasattr(settings, 'XSLT_FILES_DIR'):
raise XmlJsonImportModuleException('Settings must contain XSLT_FILES_DIR parameter')
|
9e7aed847c2d5fcd6e00bc787d8b3558b590f605
|
api/logs/urls.py
|
api/logs/urls.py
|
from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]
|
from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
|
Add /v2/logs/log_id/added_contributors/ to list of URL's.
|
Add /v2/logs/log_id/added_contributors/ to list of URL's.
|
Python
|
apache-2.0
|
abought/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,chennan47/osf.io,RomanZWang/osf.io,alexschiller/osf.io,billyhunt/osf.io,jnayak1/osf.io,RomanZWang/osf.io,emetsger/osf.io,KAsante95/osf.io,zachjanicki/osf.io,mattclark/osf.io,RomanZWang/osf.io,emetsger/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,samchrisinger/osf.io,chennan47/osf.io,icereval/osf.io,rdhyee/osf.io,cslzchen/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,hmoco/osf.io,erinspace/osf.io,SSJohns/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,chennan47/osf.io,TomBaxter/osf.io,aaxelb/osf.io,Nesiehr/osf.io,binoculars/osf.io,GageGaskins/osf.io,hmoco/osf.io,GageGaskins/osf.io,kwierman/osf.io,hmoco/osf.io,caneruguz/osf.io,SSJohns/osf.io,billyhunt/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,mluo613/osf.io,rdhyee/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,chennan47/osf.io,icereval/osf.io,rdhyee/osf.io,doublebits/osf.io,felliott/osf.io,mfraezz/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,abought/osf.io,leb2dg/osf.io,adlius/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,binoculars/osf.io,GageGaskins/osf.io,hmoco/osf.io,GageGaskins/osf.io,kwierman/osf.io,hmoco/osf.io,caneruguz/osf.io,amyshi188/osf.io,jnayak1/osf.io,mluke93/osf.io,erinspace/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,laurenrevere/osf.io,acshi/osf.io,Johnetordoff/osf.io,acshi/osf.io,crcresearch/osf.io,cwisecarver/osf.io,binoculars/osf.io,brianjgeiger/osf.io,sloria/osf.io,zachjanicki/osf.io,baylee-d/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,icereval/osf.io,wearpants/osf.io,aaxelb/osf.io,caseyrollins/osf.io,erinspace/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,mluke93/osf.io,leb2dg/osf.io,Nesiehr/osf.io,amyshi188/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,sloria/osf.io,kwierman/osf.io,samchrisinger/osf.io,doublebits/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,mattclark/osf.io,cslzchen/osf.io,Nesiehr/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,chrisseto/osf.io,acshi/osf.io,amyshi188/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,mattclark/osf.io,cslzchen/osf.io,Nesiehr/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,acshi/osf.io,cwisecarver/osf.io,kwierman/osf.io,abought/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,felliott/osf.io,adlius/osf.io,felliott/osf.io,jnayak1/osf.io,binoculars/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,billyhunt/osf.io,abought/osf.io,mluo613/osf.io,zamattiac/osf.io,GageGaskins/osf.io,mluo613/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,hmoco/osf.io,wearpants/osf.io,TomBaxter/osf.io,aaxelb/osf.io,alexschiller/osf.io,caseyrollins/osf.io,mfraezz/osf.io,doublebits/osf.io,zamattiac/osf.io,sloria/osf.io,pattisdr/osf.io,pattisdr/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,felliott/osf.io,monikagrabowska/osf.io,wearpants/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,adlius/osf.io,emetsger/osf.io,RomanZWang/osf.io,chrisseto/osf.io,kch8qx/osf.io,billyhunt/osf.io,chennan47/osf.io,kch8qx/osf.io,icereval/osf.io,TomHeatwole/osf.io,mluo613/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,kch8qx/osf.io
|
from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]
Add /v2/logs/log_id/added_contributors/ to list of URL's.
|
from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
|
<commit_before>from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]
<commit_msg>Add /v2/logs/log_id/added_contributors/ to list of URL's.<commit_after>
|
from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
|
from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]
Add /v2/logs/log_id/added_contributors/ to list of URL's.from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
|
<commit_before>from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]
<commit_msg>Add /v2/logs/log_id/added_contributors/ to list of URL's.<commit_after>from django.conf.urls import url
from api.logs import views
urlpatterns = [
url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
|
a9c6e045631103fe8508fd1b60d6076c05092fe1
|
tests/examples/customnode/nodes.py
|
tests/examples/customnode/nodes.py
|
from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
Add undo to custom node sample
|
Add undo to custom node sample
|
Python
|
agpl-3.0
|
ribeiro-ucl/viewflow,codingjoe/viewflow,pombredanne/viewflow,pombredanne/viewflow,codingjoe/viewflow,codingjoe/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow
|
from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
Add undo to custom node sample
|
from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
<commit_before>from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
<commit_msg>Add undo to custom node sample<commit_after>
|
from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
Add undo to custom node sample
from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
<commit_before>from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
<commit_msg>Add undo to custom node sample<commit_after>from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
cc8b115c6ab8265e5122e992a8ebe9960c92ada9
|
awx/sso/strategies/django_strategy.py
|
awx/sso/strategies/django_strategy.py
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django. We will also
want to ensure we update the SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
return self.request.META['SERVER_PORT']
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
# Django
from django.conf import settings
# Python social auth
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django and upgrade Django
to 1.9 and above. We will also want to ensure we update the
SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.request.META:
return self.request.META['HTTP_X_FORWARDED_PORT']
else:
return self.request.META['SERVER_PORT']
|
Fix SAML auth behind load balancer issue.
|
Fix SAML auth behind load balancer issue.
Relates to #7586 of ansible-tower as a follow-up of fix #420 of tower.
The original fix works for Django version 1.9 and above, this PR
expanded the solution to Django version 1.8 and below.
Signed-off-by: Aaron Tan <[email protected]>
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django. We will also
want to ensure we update the SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
return self.request.META['SERVER_PORT']
Fix SAML auth behind load balancer issue.
Relates to #7586 of ansible-tower as a follow-up of fix #420 of tower.
The original fix works for Django version 1.9 and above, this PR
expanded the solution to Django version 1.8 and below.
Signed-off-by: Aaron Tan <[email protected]>
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
# Django
from django.conf import settings
# Python social auth
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django and upgrade Django
to 1.9 and above. We will also want to ensure we update the
SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.request.META:
return self.request.META['HTTP_X_FORWARDED_PORT']
else:
return self.request.META['SERVER_PORT']
|
<commit_before># Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django. We will also
want to ensure we update the SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
return self.request.META['SERVER_PORT']
<commit_msg>Fix SAML auth behind load balancer issue.
Relates to #7586 of ansible-tower as a follow-up of fix #420 of tower.
The original fix works for Django version 1.9 and above, this PR
expanded the solution to Django version 1.8 and below.
Signed-off-by: Aaron Tan <[email protected]><commit_after>
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
# Django
from django.conf import settings
# Python social auth
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django and upgrade Django
to 1.9 and above. We will also want to ensure we update the
SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.request.META:
return self.request.META['HTTP_X_FORWARDED_PORT']
else:
return self.request.META['SERVER_PORT']
|
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django. We will also
want to ensure we update the SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
return self.request.META['SERVER_PORT']
Fix SAML auth behind load balancer issue.
Relates to #7586 of ansible-tower as a follow-up of fix #420 of tower.
The original fix works for Django version 1.9 and above, this PR
expanded the solution to Django version 1.8 and below.
Signed-off-by: Aaron Tan <[email protected]>
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
# Django
from django.conf import settings
# Python social auth
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django and upgrade Django
to 1.9 and above. We will also want to ensure we update the
SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.request.META:
return self.request.META['HTTP_X_FORWARDED_PORT']
else:
return self.request.META['SERVER_PORT']
|
<commit_before># Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django. We will also
want to ensure we update the SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
return self.request.META['SERVER_PORT']
<commit_msg>Fix SAML auth behind load balancer issue.
Relates to #7586 of ansible-tower as a follow-up of fix #420 of tower.
The original fix works for Django version 1.9 and above, this PR
expanded the solution to Django version 1.8 and below.
Signed-off-by: Aaron Tan <[email protected]><commit_after># Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
# Django
from django.conf import settings
# Python social auth
from social.strategies.django_strategy import DjangoStrategy
class AWXDjangoStrategy(DjangoStrategy):
"""A DjangoStrategy for python-social-auth containing
fixes and updates from social-app-django
TODO: Revert back to using the default DjangoStrategy after
we upgrade to social-core / social-app-django and upgrade Django
to 1.9 and above. We will also want to ensure we update the
SOCIAL_AUTH_STRATEGY setting.
"""
def request_port(self):
"""Port in use for this request
https://github.com/python-social-auth/social-app-django/blob/master/social_django/strategy.py#L76
"""
try: # django >= 1.9
return self.request.get_port()
except AttributeError: # django < 1.9
host_parts = self.request.get_host().split(':')
try:
return host_parts[1]
except IndexError:
if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.request.META:
return self.request.META['HTTP_X_FORWARDED_PORT']
else:
return self.request.META['SERVER_PORT']
|
fffca3d2198f7c65b2e4fa2b805efa54f4c9fdb9
|
tests/zeus/artifacts/test_xunit.py
|
tests/zeus/artifacts/test_xunit.py
|
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0.0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1.65796279907
assert r2.result == Result.passed
assert r2.message == ""
|
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1
assert r2.result == Result.passed
assert r2.message == ""
|
Fix test case being integers
|
test: Fix test case being integers
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0.0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1.65796279907
assert r2.result == Result.passed
assert r2.message == ""
test: Fix test case being integers
|
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1
assert r2.result == Result.passed
assert r2.message == ""
|
<commit_before>from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0.0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1.65796279907
assert r2.result == Result.passed
assert r2.message == ""
<commit_msg>test: Fix test case being integers<commit_after>
|
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1
assert r2.result == Result.passed
assert r2.message == ""
|
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0.0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1.65796279907
assert r2.result == Result.passed
assert r2.message == ""
test: Fix test case being integers
from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1
assert r2.result == Result.passed
assert r2.message == ""
|
<commit_before>from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0.0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1.65796279907
assert r2.result == Result.passed
assert r2.message == ""
<commit_msg>test: Fix test case being integers<commit_after>from io import BytesIO
from zeus.artifacts.xunit import XunitHandler
from zeus.constants import Result
from zeus.models import Job
from zeus.utils.testresult import TestResult as ZeusTestResult
def test_result_generation(sample_xunit):
job = Job()
fp = BytesIO(sample_xunit.encode("utf8"))
handler = XunitHandler(job)
results = handler.get_tests(fp)
assert len(results) == 2
r1 = results[0]
assert type(r1) == ZeusTestResult
assert r1.job == job
assert r1.name == "tests.test_report"
assert r1.duration == 0
assert r1.result == Result.failed
assert (
r1.message
== """tests/test_report.py:1: in <module>
> import mock
E ImportError: No module named mock"""
)
r2 = results[1]
assert type(r2) == ZeusTestResult
assert r2.job == job
assert r2.name == "tests.test_report.ParseTestResultsTest.test_simple"
assert r2.duration == 1
assert r2.result == Result.passed
assert r2.message == ""
|
533569965f23f9425a4ee07f4e613f0a843792ec
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
|
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
# entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
|
Remove entry point mecodesktop script
|
Remove entry point mecodesktop script
MacroecoDesktop is now called using the python -c syntax instead of an
entry script.
|
Python
|
bsd-2-clause
|
jkitzes/macroeco
|
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
Remove entry point mecodesktop script
MacroecoDesktop is now called using the python -c syntax instead of an
entry script.
|
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
# entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
|
<commit_before>from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
<commit_msg>Remove entry point mecodesktop script
MacroecoDesktop is now called using the python -c syntax instead of an
entry script.<commit_after>
|
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
# entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
|
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
Remove entry point mecodesktop script
MacroecoDesktop is now called using the python -c syntax instead of an
entry script.
from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
# entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
|
<commit_before>from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
<commit_msg>Remove entry point mecodesktop script
MacroecoDesktop is now called using the python -c syntax instead of an
entry script.<commit_after>from setuptools import setup, find_packages
setup(
name = 'macroeco',
version = 1.0,
packages = find_packages(),
# entry_points = {'console_scripts': ['mecodesktop=macroeco:mecodesktop',],},
package_data = {'': ['*.txt', '*.csv']},
author = 'Justin Kitzes and Mark Wilber',
author_email = '[email protected]',
description = 'Ecological pattern analysis in Python',
long_description = open('README.rst').read(),
license = 'BSD',
keywords = ('ecology biology environment conservation biodiversity '
'informatics data science'),
url = 'http://github.com/jkitzes/macroeco',
classifiers = [
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
],
install_requires = [
'numpy>=1.6',
'scipy>=0.12',
'pandas>=0.14',
'matplotlib>=1.3',
'mpmath>=0.19',
'configparser',
'decorator',
# 'shapely', # Do not force install if user doesn't have
# 'wxpython',
],
)
# python setup.py sdist bdist_egg upload -r https://testpypi.python.org/pypi
|
5dcad55d1e911a9c602fab467e64d9e9671373ac
|
setup.py
|
setup.py
|
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
|
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio>=1.1.2'],
},
)
|
Add a minimum cython-sgio version to the dependencies.
|
Add a minimum cython-sgio version to the dependencies.
This makes sure that only the _fixed_ cython-sgio version is used.
|
Python
|
lgpl-2.1
|
rosjat/python-scsi
|
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
Add a minimum cython-sgio version to the dependencies.
This makes sure that only the _fixed_ cython-sgio version is used.
|
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio>=1.1.2'],
},
)
|
<commit_before># SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
<commit_msg>Add a minimum cython-sgio version to the dependencies.
This makes sure that only the _fixed_ cython-sgio version is used.<commit_after>
|
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio>=1.1.2'],
},
)
|
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
Add a minimum cython-sgio version to the dependencies.
This makes sure that only the _fixed_ cython-sgio version is used.
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio>=1.1.2'],
},
)
|
<commit_before># SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio'],
},
)
<commit_msg>Add a minimum cython-sgio version to the dependencies.
This makes sure that only the _fixed_ cython-sgio version is used.<commit_after># SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
# coding: utf-8
from setuptools import find_packages, setup
import setuptools_scm # noqa: F401 # Ensure it's present.
setup(
packages=find_packages(exclude=["tests"]),
python_requires='~=3.7',
extras_require={
'dev': [
'isort',
'mypy',
'pre-commit',
'pytest',
'pytest-mypy',
'setuptools>=42',
'setuptools_scm[toml]>=3.4',
'wheel',
],
'iscsi': ['cython-iscsi'],
'sgio': ['cython-sgio>=1.1.2'],
},
)
|
3ca2203a977f6d25c780e7a6168a16c4f7dec732
|
setup.py
|
setup.py
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
Switch to proper markdown for long description
|
Switch to proper markdown for long description
|
Python
|
mit
|
brejoc/django-intercoolerjs
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
Switch to proper markdown for long description
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
<commit_before>import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
<commit_msg>Switch to proper markdown for long description<commit_after>
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
Switch to proper markdown for long description
import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
<commit_before>import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (ImportError, OSError):
long_description = open('README.md').read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
<commit_msg>Switch to proper markdown for long description<commit_after>import os
from codecs import open
from setuptools import setup, find_packages
repo_path = os.path.abspath(os.path.dirname(__file__))
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
with open(os.path.join(repo_path, 'requirements.txt')) as f:
requirements = f.read().splitlines()
setup(
name='django-intercoolerjs',
version='1.2.3.0',
url="https://github.com/brejoc/django-intercoolerjs",
description='Django wrapper for intercooler.js - AJAX With Attributes: There is no need to be complex.',
long_description=long_description,
long_description_content_type='text/markdown',
author='Jochen Breuer',
author_email='[email protected]',
license='MIT',
keywords='django jquery staticfiles intercoolerjs'.split(),
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
package_data={'intercoolerjs': ['static/intercoolerjs/js//*']},
install_requires=requirements,
zip_safe=False,
)
|
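The intercoolerjs record above drops the pypandoc Markdown-to-reST conversion in favour of declaring long_description_content_type='text/markdown'. As a hedged aside (readme_renderer is an assumption here, not part of the record; it is the library that twine's check command relies on), the description can be sanity-checked for PyPI rendering before an upload:
# Sketch only: verifies that README.md renders as a PyPI project description.
from readme_renderer.markdown import render
with open('README.md', encoding='utf-8') as f:
    html = render(f.read())
# render() returns None when the text cannot be rendered.
assert html is not None, 'README.md would not render on PyPI'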
cfde8a339c52c1875cb3b863ace3cad6174eb54c
|
account_cost_spread/models/account_invoice.py
|
account_cost_spread/models/account_invoice.py
|
# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Override, button Validate on invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
|
# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Invoked when validating the invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
|
Fix method description in account_cost_spread
|
Fix method description in account_cost_spread
|
Python
|
agpl-3.0
|
onesteinbv/addons-onestein,onesteinbv/addons-onestein,onesteinbv/addons-onestein
|
# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Override, button Validate on invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
Fix method description in account_cost_spread
|
# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Invoked when validating the invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
|
<commit_before># Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Override, button Validate on invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
<commit_msg>Fix method description in account_cost_spread<commit_after>
|
# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Invoked when validating the invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
|
# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Override, button Validate on invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
Fix method description in account_cost_spread# Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Invoked when validating the invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
|
<commit_before># Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Override, button Validate on invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
<commit_msg>Fix method description in account_cost_spread<commit_after># Copyright 2016-2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, models
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
"""Invoked when validating the invoices."""
res = super(AccountInvoice, self).action_move_create()
for rec in self:
rec.invoice_line_ids.compute_spread_board()
return res
@api.multi
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
for line in res:
invl_id = line.get('invl_id')
invl = self.env['account.invoice.line'].browse(invl_id)
if invl.spread_account_id:
line['account_id'] = invl.spread_account_id.id
return res
@api.multi
def action_invoice_cancel(self):
res = self.action_cancel()
for invoice in self:
for invoice_line in invoice.invoice_line_ids:
for spread_line in invoice_line.spread_line_ids:
if spread_line.move_id:
spread_line.move_id.button_cancel()
spread_line.move_id.unlink()
spread_line.unlink()
return res
|
ab3c5e7709dc4eda89821c120d220fc9898ca03c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.01',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.2',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
|
Update to prep for v0.3.0.2
|
Update to prep for v0.3.0.2
|
Python
|
mit
|
domoinc/domo-python-sdk,domoinc/domo-python-sdk
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.01',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
Update to prep for v0.3.0.2
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.2',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
|
<commit_before>from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.01',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
<commit_msg>Update to prep for v0.3.0.2<commit_after>
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.2',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
|
from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.01',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
Update to prep for v0.3.0.2from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.2',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
|
<commit_before>from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.01',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
<commit_msg>Update to prep for v0.3.0.2<commit_after>from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
description = 'The official Python3 Domo API SDK - Domo, Inc.'
long_description = 'See https://github.com/domoinc/domo-python-sdk for more details.'
setup(
name='pydomo',
version='0.3.0.2',
description=description,
long_description=long_description,
author='Jeremy Morris',
author_email='[email protected]',
url='https://github.com/domoinc/domo-python-sdk',
download_url='https://github.com/domoinc/domo-python-sdk/tarball/0.2.2.1',
keywords='domo api sdk',
license='MIT',
packages=find_packages(exclude=['examples']),
install_requires=[
'requests',
'requests_toolbelt',
],
python_requires='>=3',
)
|
f48b3dee81d0ce34740cfa65b499409f55a2588e
|
whip/web.py
|
whip/web.py
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
db = Database()
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS')
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
Use WHIP_SETTINGS environment var for Flask app
|
Use WHIP_SETTINGS environment var for Flask app
|
Python
|
bsd-3-clause
|
wbolster/whip
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
db = Database()
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
Use WHIP_SETTINGS environment var for Flask app
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS')
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
<commit_before>#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
db = Database()
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
<commit_msg>Use WHIP_SETTINGS environment var for Flask app<commit_after>
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS')
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
db = Database()
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
Use WHIP_SETTINGS environment var for Flask app#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS')
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
<commit_before>#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
db = Database()
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
<commit_msg>Use WHIP_SETTINGS environment var for Flask app<commit_after>#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS')
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
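The whip record above moves database configuration into Flask's from_envvar() mechanism, so the WHIP_SETTINGS variable has to name a Python config file before whip.web is imported. A minimal usage sketch (the file name whip_settings.cfg and the DATABASE_DIR value are hypothetical, not taken from the record):
# run_whip.py -- hedged sketch; whip_settings.cfg would contain a line such as
# DATABASE_DIR = '/var/lib/whip/db', the key that whip/web.py reads from app.config.
import os
os.environ['WHIP_SETTINGS'] = os.path.abspath('whip_settings.cfg')
from whip.web import app  # config is loaded at import time in this module
if __name__ == '__main__':
    app.run(host='localhost', port=5555)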
d9ed160e54ff40783a007154e194767af0574ec1
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
|
#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg', 'catimg.tests'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
|
Include the tests in the install
|
Include the tests in the install
|
Python
|
mit
|
asmeurer/catimg
|
#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
Include the tests in the install
|
#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg', 'catimg.tests'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
<commit_msg>Include the tests in the install<commit_after>
|
#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg', 'catimg.tests'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
|
#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
Include the tests in the install#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg', 'catimg.tests'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
<commit_msg>Include the tests in the install<commit_after>#!/usr/bin/env python
import sys
if sys.version_info < (3,):
sys.exit("catimg requires Python 3")
from setuptools import setup
import versioneer
setup(
name='catimg',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='''Print an image of a cat from Imgur to iTerm2.''',
author='Aaron Meurer',
author_email='[email protected]',
url='https://github.com/asmeurer/catimg',
packages=['catimg', 'catimg.tests'],
package_data={'catimg.tests': ['aloha_cat.png']},
long_description="""
catimg
Uses iTerm2's proprietary escape codes and Imgur to display an image of a cat
in your terminal.
NOTE: I do not own the images that you see, nor have I any control over
them. You will see some image that is tagged as "cat" on Imgur. That could be
anything. I do filter out images that are tagged NSFW, but there are no
guarantees that you won't see something you wish you hadn't. Use at your own
risk.
License: MIT
""",
entry_points={'console_scripts': [ 'catimg = catimg.__main__:main']},
install_requires=[
'requests',
'imgurpython',
],
license="MIT",
classifiers=[
'Environment :: MacOS X',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 3',
],
zip_safe=False,
)
|
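The catimg record above adds the test subpackage to packages by hand. As an alternative sketch, rather than what the commit actually does, setuptools' find_packages() would discover catimg.tests automatically, provided the directory ships an __init__.py:
# Hedged sketch: assumes catimg/ and catimg/tests/ each contain an __init__.py.
from setuptools import setup, find_packages
setup(
    name='catimg',
    packages=find_packages(),  # yields ['catimg', 'catimg.tests'] in that layout
    package_data={'catimg.tests': ['aloha_cat.png']},
)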
6b358e001c270b4ee735550c829a47c4ee4118b4
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
# -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
Include README content as long description.
|
Include README content as long description.
|
Python
|
mit
|
Emantor/syslog2irc,homeworkprod/syslog2irc
|
from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
Include README content as long description.
|
# -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
<commit_before>from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
<commit_msg>Include README content as long description.<commit_after>
|
# -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
Include README content as long description.# -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
<commit_before>from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
<commit_msg>Include README content as long description.<commit_after># -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
0cc9d5ccc815161d2a64edf4183fc6d14326b43a
|
tests/test_playartist/test_query.py
|
tests/test_playartist/test_query.py
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'beatl')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'BeatL')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
|
Correct playartist 'ignore case' test
|
Correct playartist 'ignore case' test
|
Python
|
mit
|
caleb531/play-song,caleb531/play-song
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'beatl')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
Correct playartist 'ignore case' test
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'BeatL')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'beatl')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
<commit_msg>Correct playartist 'ignore case' test<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'BeatL')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'beatl')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
Correct playartist 'ignore case' test#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'BeatL')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'beatl')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
<commit_msg>Correct playartist 'ignore case' test<commit_after>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying artists"""
results = run_filter('playartist', 'BeatL')
nose.assert_equal(results[0]['title'], 'The Beatles')
def test_partial():
"""should match partial queries when querying artists"""
results = run_filter('playartist', 'light or')
nose.assert_equal(results[0]['title'], 'Electric Light Orchestra')
|
000e3b96f6fa77cc9d6e60af67ec98ecc0d2497e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request'],
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
Copy the templates when installed.
|
Copy the templates when installed.
|
Python
|
bsd-2-clause
|
kylef/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request,gnublade/django-request,kylef/django-request
|
#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request'],
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
Copy the templates when installed.
|
#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request'],
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_msg>Copy the templates when installed.<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request'],
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
Copy the templates when installed.#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request'],
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_msg>Copy the templates when installed.<commit_after>#!/usr/bin/env python
from distutils.core import setup
import request
setup(
name='django-request',
version='%s' % request.__version__,
description='django-request is a statistics module for django. It stores requests in a database for admins to see, it can also be used to get statistics on who is online etc.',
author='Kyle Fuller',
author_email='[email protected]',
url='http://kylefuller.co.uk/projects/django-request/',
download_url='http://github.com/kylef/django-request/zipball/master',
packages=['request', 'request.templatetags'],
package_data={'request': ['templates/admin/request/*.html', 'templates/admin/request/request/*.html']},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
587071437d6f00b255e8aa00c5b82a6d05dff63e
|
repl.py
|
repl.py
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env, flags
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = ">> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
CLI_ERROR = "!! "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "!exit":
context.flags.append("exit")
return
if expression == "!debug":
flags.debug = not flags.debug
return "Debug mode %s" % ("on" if flags.debug else "off")
if expression.startswith("!exec"):
# load file
filename = expression.split(' ')[1]
print("Executing %s" % filename)
with open(filename, "r") as f:
expression = ' '.join(line for line in f)
try:
tokens = lexer.run(expression)
tree = parser.generate(tokens)
return CLI_RESULT + tree.eval(context).format()
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return CLI_ERROR + str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
Add !exec command for basic code reuse
|
Add !exec command for basic code reuse
|
Python
|
mit
|
lnsp/tea,lnsp/tea
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
Add !exec command for basic code reuse
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env, flags
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = ">> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
CLI_ERROR = "!! "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "!exit":
context.flags.append("exit")
return
if expression == "!debug":
flags.debug = not flags.debug
return "Debug mode %s" % ("on" if flags.debug else "off")
if expression.startswith("!exec"):
# load file
filename = expression.split(' ')[1]
print("Executing %s" % filename)
with open(filename, "r") as f:
expression = ' '.join(line for line in f)
try:
tokens = lexer.run(expression)
tree = parser.generate(tokens)
return CLI_RESULT + tree.eval(context).format()
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return CLI_ERROR + str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
<commit_msg>Add !exec command for basic code reuse<commit_after>
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env, flags
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = ">> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
CLI_ERROR = "!! "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "!exit":
context.flags.append("exit")
return
if expression == "!debug":
flags.debug = not flags.debug
return "Debug mode %s" % ("on" if flags.debug else "off")
if expression.startswith("!exec"):
# load file
filename = expression.split(' ')[1]
print("Executing %s" % filename)
with open(filename, "r") as f:
expression = ' '.join(line for line in f)
try:
tokens = lexer.run(expression)
tree = parser.generate(tokens)
return CLI_RESULT + tree.eval(context).format()
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return CLI_ERROR + str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
Add !exec command for basic code reuse#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env, flags
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = ">> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
CLI_ERROR = "!! "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "!exit":
context.flags.append("exit")
return
if expression == "!debug":
flags.debug = not flags.debug
return "Debug mode %s" % ("on" if flags.debug else "off")
if expression.startswith("!exec"):
# load file
filename = expression.split(' ')[1]
print("Executing %s" % filename)
with open(filename, "r") as f:
expression = ' '.join(line for line in f)
try:
tokens = lexer.run(expression)
tree = parser.generate(tokens)
return CLI_RESULT + tree.eval(context).format()
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return CLI_ERROR + str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = "#> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "exit":
context.flags.append("exit")
return
try:
tokens = lexer.run(expression)
print('Generated tokens:', ', '.join((str(e) for e in tokens)))
tree = parser.generate(tokens)
print(tree)
return tree.eval(context).data
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
<commit_msg>Add !exec command for basic code reuse<commit_after>#!/usr/bin/python3
"""Command line runtime for Tea."""
import runtime.lib
from runtime import lexer, parser, env, flags
TEA_VERSION = "0.0.5-dev"
TEA_TITLE = "Tea @" + TEA_VERSION
CLI_SYMBOL = ">> "
CLI_SPACE = " " * 3
CLI_RESULT = "<- "
CLI_ERROR = "!! "
def interpret(expression, context):
"""Interpret an expression by tokenizing, parsing and evaluating."""
if expression == "!exit":
context.flags.append("exit")
return
if expression == "!debug":
flags.debug = not flags.debug
return "Debug mode %s" % ("on" if flags.debug else "off")
if expression.startswith("!exec"):
# load file
filename = expression.split(' ')[1]
print("Executing %s" % filename)
with open(filename, "r") as f:
expression = ' '.join(line for line in f)
try:
tokens = lexer.run(expression)
tree = parser.generate(tokens)
return CLI_RESULT + tree.eval(context).format()
except (env.FunctionException, env.OperatorException, env.RuntimeException, parser.ParseException) as e:
return CLI_ERROR + str(e)
def main():
"""Run the CLI."""
# print application title
print(TEA_TITLE)
# run REPL
context = env.empty_context()
context.load(runtime.lib)
while "done" not in context.flags:
output = interpret(input(CLI_SYMBOL), context)
while "continue" in context.flags:
output = interpret(input(CLI_SPACE), context)
if "exit" in context.flags:
return
print(output)
if __name__ == "__main__":
main()
|
d4a67c8895349532fbc7764531130f737ca53d89
|
setup.py
|
setup.py
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
'treebeard',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
Remove treebeard dependency (it's django-treebeard).
|
Remove treebeard dependency (it's django-treebeard).
|
Python
|
mit
|
ddsc/ddsc-core,ddsc/ddsc-core
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
'treebeard',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
Remove treebeard dependency (it's django-treebeard).
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
<commit_before>from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
'treebeard',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
<commit_msg>Remove treebeard dependency (it's django-treebeard).<commit_after>
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
'treebeard',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
Remove treebeard dependency (it's django-treebeard).from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
<commit_before>from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
'treebeard',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
<commit_msg>Remove treebeard dependency (it's django-treebeard).<commit_after>from setuptools import setup
version = '0.2.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'Django',
'cassandralib',
'django-extensions',
'django-nose',
'django-treebeard',
'lizard-security',
'lizard-ui >= 4.0b5',
'pandas',
],
tests_require = [
]
setup(name='ddsc-core',
version=version,
description="TODO",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Programming Language :: Python',
'Framework :: Django',
],
keywords=[],
author='TODO',
author_email='[email protected]',
url='',
license='GPL',
packages=['ddsc_core'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
]},
)
|
f349753417682960e607b458a009fbfd324de7ab
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = open('README.rst').read(),
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
|
#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
readme = open('README.rst').read()
history = open('HISTORY.rst').read()
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = readme + '\n\n' + history,
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
|
Add history to long description
|
Add history to long description
|
Python
|
mit
|
jeanbaptistelab/django-kronos,jeanbaptistelab/django-kronos,joshblum/django-kronos,jgorset/django-kronos,jgorset/django-kronos,joshblum/django-kronos
|
#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = open('README.rst').read(),
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
Add history to long description
|
#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
readme = open('README.rst').read()
history = open('HISTORY.rst').read()
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = readme + '\n\n' + history,
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = open('README.rst').read(),
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
<commit_msg>Add history to long description<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
readme = open('README.rst').read()
history = open('HISTORY.rst').read()
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = readme + '\n\n' + history,
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
|
#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = open('README.rst').read(),
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
Add history to long description#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
readme = open('README.rst').read()
history = open('HISTORY.rst').read()
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = readme + '\n\n' + history,
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = open('README.rst').read(),
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
<commit_msg>Add history to long description<commit_after>#!/usr/bin/env python
from setuptools import setup
execfile('kronos/version.py')
readme = open('README.rst').read()
history = open('HISTORY.rst').read()
setup(
name = 'django-kronos',
version = __version__,
description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.',
long_description = readme + '\n\n' + history,
author = 'Johannes Gorset',
author_email = '[email protected]',
url = 'http://github.com/jgorset/kronos',
packages = ['kronos', 'kronos.management', 'kronos.management.commands']
)
|
0b3c7183e7f8543de3e9875384c5623c24279c4d
|
setup.py
|
setup.py
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=True)
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={
'pebble_tool.commands.sdk': ['python'],
},
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=False)
|
Make sure our python alias is included in packaged versions.
|
Make sure our python alias is included in packaged versions.
|
Python
|
mit
|
pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=True)
Make sure our python alias is included in packaged versions.
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={
'pebble_tool.commands.sdk': ['python'],
},
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=False)
|
<commit_before>__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=True)
<commit_msg>Make sure our python alias is included in packaged versions.<commit_after>
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={
'pebble_tool.commands.sdk': ['python'],
},
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=False)
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=True)
Make sure our python alias is included in packaged versions.__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={
'pebble_tool.commands.sdk': ['python'],
},
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=False)
|
<commit_before>__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=True)
<commit_msg>Make sure our python alias is included in packaged versions.<commit_after>__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'libpebble2==0.0.14',
'httplib2==0.9.1',
'oauth2client==1.4.12',
'progressbar2==2.7.3',
'pyasn1==0.1.8',
'pyasn1-modules==0.0.6',
'pypng==0.0.17',
'pyqrcode==1.1',
'requests==2.7.0',
'rsa==3.1.4',
'pyserial==2.7',
'six==1.9.0',
'websocket-client==0.32.0',
'wheel==0.24.0',
'colorama==0.3.3',
]
if sys.version_info < (3, 4, 0):
requires.append('enum34==1.0.4')
setup(name='pebble-tool',
version='3.6',
description='Tool for interacting with pebbles.',
url='https://github.com/pebble/pebble-tool',
author='Pebble Technology Corporation',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
package_data={
'pebble_tool.commands.sdk': ['python'],
},
install_requires=requires,
entry_points={
'console_scripts': ['pebble=pebble_tool:run_tool'],
},
zip_safe=False)
|
d313c43f99ab167f6526698561617c234ee4799a
|
setup.py
|
setup.py
|
import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.5",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
|
import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.6",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
|
Convert readme since pypi doesnt support markdown
|
Convert readme since pypi doesnt support markdown
|
Python
|
mit
|
kirkegaard/tvtid.py
|
import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.5",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
Convert readme since pypi doesnt support markdown
|
import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.6",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
|
<commit_before>import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.5",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
<commit_msg>Convert readme since pypi doesnt support markdown<commit_after>
|
import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.6",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
|
import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.5",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
Convert readme since pypi doesnt support markdownimport setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.6",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
|
<commit_before>import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.5",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
<commit_msg>Convert readme since pypi doesnt support markdown<commit_after>import setuptools
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
setuptools.setup(
name="tvtid",
version="0.1.6",
author="Christian Kirkegaard",
author_email="[email protected]",
description="Library and cli tool for querying tvtid.dk",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/kirkegaard/tvtid.py",
download_url="https://github.com/kirkegaard/tvtid.py",
install_requires=[
'python-dateutil',
'fuzzywuzzy[speedup]',
'requests',
'requests_cache'
],
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
py_modules=["tvtid"],
entry_points={
"console_scripts": [
"tvtid=tvtid:main"
]
},
python_requires=">=3.5",
include_package_data=True
)
|
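The tvtid change above predates PyPI's native Markdown rendering, so it converts README.md to reStructuredText with pypandoc and falls back to the raw file when pandoc is unavailable. With a sufficiently new setuptools/twine toolchain the conversion step can be dropped entirely by declaring the content type of the long description; the following is a minimal illustrative sketch (hypothetical package name and version, not the tvtid code):

import setuptools

# Read the Markdown README as-is; no pandoc conversion step is needed.
with open("README.md", encoding="utf-8") as f:
    long_desc = f.read()

setuptools.setup(
    name="example-package",   # hypothetical name for illustration
    version="0.0.1",
    long_description=long_desc,
    # Tells PyPI the long description is Markdown so it renders README.md directly.
    long_description_content_type="text/markdown",
)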
9c46a56b64a9d08218b5c0cbb8e88c98b5dc3787
|
setup.py
|
setup.py
|
# Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=open('README.md').read(),
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
|
# Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=readme,
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
|
Fix issue while installation via pip
|
Fix issue while installation via pip
Signed-off-by: Lehner Florian <[email protected]>
|
Python
|
apache-2.0
|
florianl/panonoctl
|
# Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=open('README.md').read(),
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
Fix issue while installation via pip
Signed-off-by: Lehner Florian <[email protected]>
|
# Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=readme,
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
|
<commit_before># Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=open('README.md').read(),
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
<commit_msg>Fix issue while installation via pip
Signed-off-by: Lehner Florian <[email protected]><commit_after>
|
# Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=readme,
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
|
# Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=open('README.md').read(),
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
Fix issue while installation via pip
Signed-off-by: Lehner Florian <[email protected]># Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=readme,
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
|
<commit_before># Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=open('README.md').read(),
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
<commit_msg>Fix issue while installation via pip
Signed-off-by: Lehner Florian <[email protected]><commit_after># Copyright 2016 Florian Lehner. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
with open('README.md') as f:
readme = f.read()
setup(
name='panonoctl',
version='0.3',
py_modules=['panonoctl'],
packages=find_packages(),
long_description=readme,
include_package_data=True,
description = 'Python API to interact with the PANONO 360-camera',
author = 'Florian Lehner',
author_email = '[email protected]',
url = 'https://github.com/florianl/panonoctl/',
download_url = 'https://github.com/florianl/panonoctl/archive/master.tar.gz',
keywords = ['Panono', 'API '],
install_requires=['websocket', 'simplejson', 'socket', 'struct'],
classifiers=[ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers'
],
license = 'Apache License 2.0'
)
|
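The panonoctl fix above swaps a bare open('README.md').read() for a with block, so the file handle is closed deterministically while pip builds the package. A related pitfall when installing from an sdist is a README that is missing or not valid UTF-8; a defensive sketch of the same idea (illustrative only, not the project's actual code) is:

import io
import os

here = os.path.abspath(os.path.dirname(__file__))
readme_path = os.path.join(here, "README.md")

# Fall back to a short description if the README did not ship in the sdist.
if os.path.exists(readme_path):
    with io.open(readme_path, encoding="utf-8") as f:
        long_desc = f.read()
else:
    long_desc = "Python API to interact with the PANONO 360-camera"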
7f7b3a73b33e6a2857520dd8b9e00b2dc17a04f3
|
setup.py
|
setup.py
|
from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="Distributex. A network mutex service for distributed"
"environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
|
from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="A network mutex service for distributed environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
|
Set a sane package description
|
Set a sane package description
|
Python
|
mit
|
calston/distributex
|
from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="Distributex. A network mutex service for distributed"
"environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
Set a sane package description
|
from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="A network mutex service for distributed environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
|
<commit_before>from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="Distributex. A network mutex service for distributed"
"environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
<commit_msg>Set a sane package description<commit_after>
|
from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="A network mutex service for distributed environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
|
from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="Distributex. A network mutex service for distributed"
"environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
Set a sane package descriptionfrom setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="A network mutex service for distributed environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
|
<commit_before>from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="Distributex. A network mutex service for distributed"
"environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
<commit_msg>Set a sane package description<commit_after>from setuptools import setup
def listify(filename):
return filter(None, open(filename, 'r').read().strip('\n').split('\n'))
setup(
name="distributex",
version="0.1",
url='http://github.com/calston/distributex',
license='MIT',
description="A network mutex service for distributed environments.",
long_description=open('README.md', 'r').read(),
author='Colin Alston',
author_email='[email protected]',
packages=[
"distributex",
"twisted.plugins",
],
package_data={
'twisted.plugins': ['twisted/plugins/distributex_plugin.py']
},
include_package_data=True,
install_requires=listify('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: System :: Clustering',
'Topic :: System :: Distributed Computing',
],
)
|
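Besides trimming the description, the distributex setup.py above feeds install_requires from requirements.txt through its listify() helper. A slightly more defensive variant of that helper, written here as a sketch under the assumption that the file may contain blank lines and # comments (the real requirements file may not need this), looks like:

def listify(filename):
    """Return non-empty, non-comment lines from a requirements-style file."""
    with open(filename, "r") as f:
        stripped = (line.strip() for line in f)
        return [line for line in stripped if line and not line.startswith("#")]

# Usage in setup():  install_requires=listify("requirements.txt")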
8ddc7ecbc768ae25b1d0e74ecfeb9045fb461d16
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.04',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.4',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask',
'blinker'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Add blinker as install dependency (used for signals within flask)
|
Add blinker as install dependency (used for signals within flask)
|
Python
|
bsd-3-clause
|
dianchang/flask-debugtoolbar,dianchang/flask-debugtoolbar,lepture/flask-debugtoolbar,lepture/flask-debugtoolbar,dianchang/flask-debugtoolbar
|
from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.04',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Add blinker as install dependency (used for signals within flask)
|
from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.4',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask',
'blinker'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.04',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Add blinker as install dependency (used for signals within flask)<commit_after>
|
from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.4',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask',
'blinker'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.04',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Add blinker as install dependency (used for signals within flask)from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.4',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask',
'blinker'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.04',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Add blinker as install dependency (used for signals within flask)<commit_after>from setuptools import setup, find_packages
setup(
name='Flask-DebugToolbar',
version='0.4',
url='http://github.com/mvantellingen/flask-debugtoolbar',
license='BSD',
author='Michael van Tellingen',
author_email='[email protected]',
description='A port of the Django debug toolbar to Flask',
long_description=__doc__,
packages=find_packages(),
package_dir={'flaskext.debugtoolbar': 'flaskext/debugtoolbar'},
package_data={'flaskext.debugtoolbar': [
'static/css/*.css', 'static/js/*.js', 'static/img/*',
'templates/*.html', 'templates/panels/*.html'
]},
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'setuptools',
'Flask',
'blinker'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
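The blinker requirement added above exists because Flask's signalling layer is built on blinker; without it the signals the debug toolbar listens to are not functional. A minimal standalone blinker example, unrelated to the toolbar's own code, shows the connect/send pattern:

from blinker import signal

request_logged = signal("request-logged")   # hypothetical signal name

def on_request_logged(sender, **extra):
    # Receivers get the sender plus whatever keyword payload send() passed along.
    print("signal from", sender, extra)

request_logged.connect(on_request_logged)
request_logged.send("example-app", path="/index")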
5d93a71fee4b53000a3a5bbacd7d24f1caf11528
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={"http": ["requests", "aiohttp"]},
zip_safe=False,
)
|
#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={
"aws": ["s3fs"],
"azure": ["adlfs"],
"dask": ["dask", "distributed"],
"dropbox": ["dropboxdrivefs", "requests", "dropbox"],
"git": ["pygit2"],
"github": ["requests"],
"google": ["gcsfs"],
"hdfs": ["pyarrow"],
"http": ["requests", "aiohttp"],
"smb": ["smbprotocol"],
"ssh": ["paramiko"],
"webhdfs": ["requests"],
},
zip_safe=False,
)
|
Add more extras based on what's in registry.py
|
Add more extras based on what's in registry.py
|
Python
|
bsd-3-clause
|
fsspec/filesystem_spec,intake/filesystem_spec,fsspec/filesystem_spec
|
#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={"http": ["requests", "aiohttp"]},
zip_safe=False,
)
Add more extras based on what's in registry.py
|
#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={
"aws": ["s3fs"],
"azure": ["adlfs"],
"dask": ["dask", "distributed"],
"dropbox": ["dropboxdrivefs", "requests", "dropbox"],
"git": ["pygit2"],
"github": ["requests"],
"google": ["gcsfs"],
"hdfs": ["pyarrow"],
"http": ["requests", "aiohttp"],
"smb": ["smbprotocol"],
"ssh": ["paramiko"],
"webhdfs": ["requests"],
},
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={"http": ["requests", "aiohttp"]},
zip_safe=False,
)
<commit_msg>Add more extras based on what's in registry.py<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={
"aws": ["s3fs"],
"azure": ["adlfs"],
"dask": ["dask", "distributed"],
"dropbox": ["dropboxdrivefs", "requests", "dropbox"],
"git": ["pygit2"],
"github": ["requests"],
"google": ["gcsfs"],
"hdfs": ["pyarrow"],
"http": ["requests", "aiohttp"],
"smb": ["smbprotocol"],
"ssh": ["paramiko"],
"webhdfs": ["requests"],
},
zip_safe=False,
)
|
#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={"http": ["requests", "aiohttp"]},
zip_safe=False,
)
Add more extras based on what's in registry.py#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={
"aws": ["s3fs"],
"azure": ["adlfs"],
"dask": ["dask", "distributed"],
"dropbox": ["dropboxdrivefs", "requests", "dropbox"],
"git": ["pygit2"],
"github": ["requests"],
"google": ["gcsfs"],
"hdfs": ["pyarrow"],
"http": ["requests", "aiohttp"],
"smb": ["smbprotocol"],
"ssh": ["paramiko"],
"webhdfs": ["requests"],
},
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={"http": ["requests", "aiohttp"]},
zip_safe=False,
)
<commit_msg>Add more extras based on what's in registry.py<commit_after>#!/usr/bin/env python
import os
from setuptools import setup
import versioneer
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="fsspec",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="File-system specification",
long_description=long_description,
long_description_content_type="text/markdown",
url="http://github.com/intake/filesystem_spec",
maintainer="Martin Durant",
maintainer_email="[email protected]",
license="BSD",
keywords="file",
packages=["fsspec", "fsspec.implementations"],
python_requires=">3.6",
install_requires=open("requirements.txt").read().strip().split("\n"),
extras_require={
"aws": ["s3fs"],
"azure": ["adlfs"],
"dask": ["dask", "distributed"],
"dropbox": ["dropboxdrivefs", "requests", "dropbox"],
"git": ["pygit2"],
"github": ["requests"],
"google": ["gcsfs"],
"hdfs": ["pyarrow"],
"http": ["requests", "aiohttp"],
"smb": ["smbprotocol"],
"ssh": ["paramiko"],
"webhdfs": ["requests"],
},
zip_safe=False,
)
|
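Each extras group above is only resolved when a user opts in, e.g. pip install fsspec[http,ssh]. A common companion pattern, shown here as a sketch rather than anything taken from fsspec's actual setup.py, is deriving an aggregate extra that unions all of the optional dependency sets:

extras_require = {
    "http": ["requests", "aiohttp"],
    "ssh": ["paramiko"],
    "hdfs": ["pyarrow"],
}

# Hypothetical "full" extra so `pip install package[full]` pulls every optional dependency.
extras_require["full"] = sorted({dep for deps in extras_require.values() for dep in deps})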
1a60f395ec314b085bb12e132bf44c2ec8be1663
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"argparse",
"dbus-python",
"python-gettext",
"PyYAML",
],
)
|
#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"dbus-python",
"python-gettext",
"PyYAML",
],
)
|
Drop argparse as a dependency
|
Drop argparse as a dependency
argparse has been part of the standard library since Python 2.7,
so there's no reason to declare this as a dependency, since it cannot
be satisfied by anyone running a modern Linux distribution including
a supported version of Python.
|
Python
|
lgpl-2.1
|
manatools/python-manatools,manatools/python-manatools
|
#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"argparse",
"dbus-python",
"python-gettext",
"PyYAML",
],
)
Drop argparse as a dependency
argparse has been part of the standard library since Python 2.7,
so there's no reason to declare this as a dependency, since it cannot
be satisfied by anyone running a modern Linux distribution including
a supported version of Python.
|
#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"dbus-python",
"python-gettext",
"PyYAML",
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"argparse",
"dbus-python",
"python-gettext",
"PyYAML",
],
)
<commit_msg>Drop argparse as a dependency
argparse has been part of the standard library since Python 2.7,
so there's no reason to declare this as a dependency, since it cannot
be satisfied by anyone running a modern Linux distribution including
a supported version of Python.<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"dbus-python",
"python-gettext",
"PyYAML",
],
)
|
#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"argparse",
"dbus-python",
"python-gettext",
"PyYAML",
],
)
Drop argparse as a dependency
argparse has been part of the standard library since Python 2.7,
so there's no reason to declare this as a dependency, since it cannot
be satisfied by anyone running a modern Linux distribution including
a supported version of Python.#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"dbus-python",
"python-gettext",
"PyYAML",
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"argparse",
"dbus-python",
"python-gettext",
"PyYAML",
],
)
<commit_msg>Drop argparse as a dependency
argparse has been part of the standard library since Python 2.7,
so there's no reason to declare this as a dependency, since it cannot
be satisfied by anyone running a modern Linux distribution including
a supported version of Python.<commit_after>#!/usr/bin/env python3
from setuptools import setup
exec(open('manatools/version.py').read())
try:
import yui
except ImportError:
import sys
print('Please install python3-yui in order to install this package',
file=sys.stderr)
sys.exit(1)
setup(
name=__project_name__,
version=__project_version__,
author='Angelo Naselli',
author_email='[email protected]',
packages=['manatools', 'manatools.ui'],
#scripts=['scripts/'],
license='LGPLv2+',
description='Python ManaTools framework.',
long_description=open('README.md').read(),
#data_files=[('conf/manatools', ['XXX.yy',]), ],
install_requires=[
"dbus-python",
"python-gettext",
"PyYAML",
],
)
|
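Dropping argparse is safe because the module has shipped in the standard library since Python 2.7/3.2, so listing it adds nothing on any interpreter the package supports. When a backport really is needed only on old interpreters, a PEP 508 environment marker expresses that conditionally instead of unconditionally; a generic sketch (not the manatools code):

from setuptools import setup

setup(
    name="example",    # hypothetical package
    version="0.1",
    install_requires=[
        "PyYAML",
        # Only installed on interpreters that predate the stdlib module.
        'argparse; python_version < "2.7"',
    ],
)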
2d1ef22d384cb04d86946572599f2040b798e6d6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
if(os.environ.get('THIS_IS_TRAVIS') is not None):
print("This appears to be Travis!")
compile_args = ['-O3']
else:
compile_args = ['-ffast-math', '-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
|
Remove --ffast-math for all builds
|
Remove --ffast-math for all builds
Due to a bug in anaconda's libm support for linux, fast-math is
unusable. And I don't want to try to hack a way to decide if it's
usable on things other than linux, because it's just one more thing to
break.
|
Python
|
mit
|
moble/quaternion,moble/quaternion
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
if(os.environ.get('THIS_IS_TRAVIS') is not None):
print("This appears to be Travis!")
compile_args = ['-O3']
else:
compile_args = ['-ffast-math', '-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
Remove --ffast-math for all builds
Due to a bug in anaconda's libm support for linux, fast-math is
unusable. And I don't want to try to hack a way to decide if it's
usable on things other than linux, because it's just one more thing to
break.
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
|
<commit_before>#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
if(os.environ.get('THIS_IS_TRAVIS') is not None):
print("This appears to be Travis!")
compile_args = ['-O3']
else:
compile_args = ['-ffast-math', '-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
<commit_msg>Remove --ffast-math for all builds
Due to a bug in anaconda's libm support for linux, fast-math is
unusable. And I don't want to try to hack a way to decide if it's
usable on things other than linux, because it's just one more thing to
break.<commit_after>
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
|
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
if(os.environ.get('THIS_IS_TRAVIS') is not None):
print("This appears to be Travis!")
compile_args = ['-O3']
else:
compile_args = ['-ffast-math', '-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
Remove --ffast-math for all builds
Due to a bug in anaconda's libm support for linux, fast-math is
unusable. And I don't want to try to hack a way to decide if it's
usable on things other than linux, because it's just one more thing to
break.
#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
|
<commit_before>#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
if(os.environ.get('THIS_IS_TRAVIS') is not None):
print("This appears to be Travis!")
compile_args = ['-O3']
else:
compile_args = ['-ffast-math', '-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
<commit_msg>Remove --ffast-math for all builds
Due to a bug in anaconda's libm support for linux, fast-math is
unusable. And I don't want to try to hack a way to decide if it's
usable on things other than linux, because it's just one more thing to
break.<commit_after>#!/usr/bin/env python
def configuration(parent_package='',top_path=None):
import numpy
import os
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-O3']
config = Configuration('quaternion',parent_package,top_path)
config.add_extension('numpy_quaternion',
['quaternion.c','numpy_quaternion.c'],
extra_compile_args=compile_args,)
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration)
|
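A hedged sketch of one way to keep -ffast-math available without enabling it by default, which is the trade-off the commit above settles on. The environment-variable name is invented for illustration and is not used by the quaternion package.
import os
def compile_args(opt_in_var="NUMPY_QUATERNION_FAST_MATH"):
    # Conservative default; add -ffast-math only when the builder explicitly opts in.
    args = ["-O3"]
    if os.environ.get(opt_in_var) == "1":
        args.insert(0, "-ffast-math")
    return args
print(compile_args())  # -> ['-O3'] unless the opt-in variable is set to "1"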
84355e70e39bbca5cd4cf6756c5b463dd36c1a9c
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
with open("README.rst") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
|
from setuptools import find_packages, setup
with open("README.md") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
|
Change install to use README.md vs .rst
|
Change install to use README.md vs .rst
|
Python
|
bsd-3-clause
|
qiime2/qiime-studio,qiime2/qiime-studio-frontend,jakereps/qiime-studio,jakereps/qiime-studio-frontend,qiime2/qiime-studio,qiime2/qiime-studio,qiime2/qiime-studio-frontend,jakereps/qiime-studio-frontend,jakereps/qiime-studio,jakereps/qiime-studio
|
from setuptools import find_packages, setup
with open("README.rst") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
Change install to use README.md vs .rst
|
from setuptools import find_packages, setup
with open("README.md") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
|
<commit_before>from setuptools import find_packages, setup
with open("README.rst") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
<commit_msg>Change install to use README.md vs .rst<commit_after>
|
from setuptools import find_packages, setup
with open("README.md") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
|
from setuptools import find_packages, setup
with open("README.rst") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
Change install to use README.md vs .rst
from setuptools import find_packages, setup
with open("README.md") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
|
<commit_before>from setuptools import find_packages, setup
with open("README.rst") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
<commit_msg>Change install to use README.md vs .rst<commit_after>from setuptools import find_packages, setup
with open("README.md") as fh:
long_description = fh.read()
setup(
name='qiime-studio',
version='0.0.1dev',
long_description=long_description,
packages=find_packages(),
install_requires=['click', 'flask', 'gevent'],
scripts=['scripts/qiime-studio']
)
|
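An illustrative setup() call, assuming a reasonably recent setuptools: when the long description comes from a Markdown README, declaring long_description_content_type lets PyPI render it. The package name below is a placeholder, not qiime-studio's real metadata.
from setuptools import find_packages, setup
with open("README.md") as fh:
    long_description = fh.read()
setup(
    name="example-package",                      # placeholder metadata
    version="0.0.1",
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=find_packages(),
)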
5737f701d59c229d62f25734260fccb23722a67d
|
setup.py
|
setup.py
|
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
|
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
scripts = ["pyjs.py"],
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
|
Include the pyjs compiler script in pypi distribution.
|
Include the pyjs compiler script in pypi distribution.
I tested this in a virtualenv and it worked.
|
Python
|
mit
|
buchuki/pyjaco,chrivers/pyjaco,buchuki/pyjaco,chrivers/pyjaco,chrivers/pyjaco,buchuki/pyjaco
|
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
Include the pyjs compiler script in pypi distribution.
I tested this in a virtualenv and it worked.
|
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
scripts = ["pyjs.py"],
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
|
<commit_before>from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
<commit_msg>Include the pyjs compiler script in pypi distribution.
I tested this in a virtualenv and it worked.<commit_after>
|
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
scripts = ["pyjs.py"],
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
|
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
Include the pyjs compiler script in pypi distribution.
I tested this in a virtualenv and it worked.
from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
scripts = ["pyjs.py"],
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
|
<commit_before>from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
<commit_msg>Include the pyjs compiler script in pypi distribution.
I tested this in a virtualenv and it worked.<commit_after>from distutils.core import setup
try:
from setuptools import setup
except:
pass
setup(
name = "pyjaco",
version = "1.0.0",
author = "Pyjaco development team",
author_email = "[email protected]",
description = ("Python to JavaScript translator"),
scripts = ["pyjs.py"],
url = "http://pyjaco.org",
keywords = "python javascript translator compiler",
packages=["pyjaco", "pyjaco.compiler"],
)
|
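For comparison, a hypothetical alternative to scripts=['pyjs.py']: a console_scripts entry point, which installs a generated wrapper command instead of copying the script verbatim. The module and function names here are assumptions for the sketch, not pyjaco's actual layout.
from setuptools import setup
setup(
    name="example-tool",                          # placeholder metadata
    version="0.1.0",
    py_modules=["example_tool"],
    entry_points={
        "console_scripts": [
            # installs an `example-tool` command that calls example_tool.main()
            "example-tool = example_tool:main",
        ]
    },
)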
08f633cdf0f5dcd1940da46e91c175e81b39ad3f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=2)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
import sys
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=sys.version_info.major)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
Set Cython language_level to 3 when compiling for python3
|
Set Cython language_level to 3 when compiling for python3
|
Python
|
mit
|
tmetsch/python-dtrace,tmetsch/python-dtrace
|
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=2)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
Set Cython language_level to 3 when compiling for python3
|
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
import sys
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=sys.version_info.major)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
<commit_before>#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=2)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
<commit_msg>Set Cython language_level to 3 when compiling for python3<commit_after>
|
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
import sys
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=sys.version_info.major)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=2)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
Set Cython language_level to 3 when compiling for python3
#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
import sys
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=sys.version_info.major)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
<commit_before>#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=2)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
<commit_msg>Set Cython language_level to 3 when compiling for python3<commit_after>#!/usr/bin/env python
"""
Setup script.
Created on Oct 10, 2011
@author: tmetsch
"""
from distutils.core import setup
from distutils.extension import Extension
import sys
try:
from Cython.Build import build_ext, cythonize
BUILD_EXTENSION = {'build_ext': build_ext}
EXT_MODULES = cythonize([Extension("dtrace", ["dtrace_cython/dtrace_h.pxd",
"dtrace_cython/consumer.pyx"],
libraries=["dtrace"])],
language_level=sys.version_info.major)
except ImportError:
BUILD_EXTENSION = {}
EXT_MODULES = None
print('WARNING: Cython seems not to be present. Currently you will only'
' be able to use the ctypes wrapper. Or you can install cython and'
' try again.')
setup(name='python-dtrace',
version='0.0.10',
description='DTrace consumer for Python based on libdtrace. Use Python'
+ ' as DTrace Consumer and Provider! See the homepage for'
+ ' more information.',
license='MIT',
keywords='DTrace',
url='http://tmetsch.github.com/python-dtrace/',
packages=['dtrace_ctypes'],
cmdclass=BUILD_EXTENSION,
ext_modules=EXT_MODULES,
classifiers=["Development Status :: 2 - Pre-Alpha",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
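A minimal illustration of the pattern used above, assuming Cython is installed; hello.pyx is a placeholder file name. Tying language_level to sys.version_info.major keeps the generated C code consistent with whichever interpreter runs the build.
import sys
try:
    from distutils.extension import Extension
    from Cython.Build import cythonize
    ext_modules = cythonize(
        [Extension("hello", ["hello.pyx"])],       # placeholder .pyx source
        language_level=sys.version_info.major,     # 2 on Python 2, 3 on Python 3
    )
except ImportError:
    ext_modules = None                             # Cython missing: fall back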
5b263cd9c88e7e846fce3b38b9fbc069e809b13c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.0',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.0.0',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
|
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.1',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.1.1',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
|
Update download link and version number
|
Update download link and version number
|
Python
|
mit
|
ProcessOut/processout-python
|
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.0',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.0.0',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
Update download link and version number
|
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.1',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.1.1',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
|
<commit_before>from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.0',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.0.0',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
<commit_msg>Update download link and version number<commit_after>
|
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.1',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.1.1',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
|
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.0',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.0.0',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
Update download link and version number
from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.1',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.1.1',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
|
<commit_before>from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.0',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.0.0',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
<commit_msg>Update download link and version number<commit_after>from distutils.core import setup
setup(
name = 'processout',
packages = ['ProcessOut'],
version = '2.1.1',
description = 'ProcessOut API bindings.',
author = 'ProcessOut',
author_email = '[email protected]',
url = 'https://github.com/processout/processout-python',
download_url = 'https://github.com/processout/processout-python/tarball/2.1.1',
keywords = ['ProcessOut', 'api', 'bindings'],
classifiers = [],
)
|
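A hedged sketch of how the mismatch fixed above (version bumped but tarball URL left at 2.0.0) can be avoided: derive download_url from a single version constant. Purely illustrative, not the library's actual code.
VERSION = "2.1.1"
BASE = "https://github.com/processout/processout-python/tarball/"
setup_kwargs = dict(
    name="processout",
    version=VERSION,
    download_url=BASE + VERSION,   # always tracks the version string
)
print(setup_kwargs["download_url"])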
69e1e4450eaeb9d57f4cbfff020a9ed56dbbb3ce
|
setup.py
|
setup.py
|
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
url = URL,
version = app.version
)
|
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
test_suite='nose.collector',
url = URL,
version = app.version
)
|
Set nose.collector as the test_suite
|
Set nose.collector as the test_suite
|
Python
|
mit
|
jmcfarlane/Notable,jmcfarlane/Notable,jmcfarlane/Notable,jmcfarlane/Notable
|
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
url = URL,
version = app.version
)
Set nose.collector as the test_suite
|
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
test_suite='nose.collector',
url = URL,
version = app.version
)
|
<commit_before># Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
url = URL,
version = app.version
)
<commit_msg>Set nose.collector as the test_suite<commit_after>
|
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
test_suite='nose.collector',
url = URL,
version = app.version
)
|
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
url = URL,
version = app.version
)
Set nose.collector as the test_suite
# Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
test_suite='nose.collector',
url = URL,
version = app.version
)
|
<commit_before># Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
url = URL,
version = app.version
)
<commit_msg>Set nose.collector as the test_suite<commit_after># Python imports
from setuptools import setup
# Project imports
from notable import app
# Attributes
AUTHOR = 'John McFarlane'
DESCRIPTION = 'A very simple note taking application'
EMAIL = '[email protected]'
NAME = 'Notable'
PYPI = 'http://pypi.python.org/packages/source/N/Notable'
URL = 'https://github.com/jmcfarlane/Notable'
CLASSIFIERS = """
Development Status :: 2 - Pre-Alpha
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Topic :: Internet :: WWW/HTTP
Intended Audience :: End Users/Desktop
Topic :: Office/Business :: News/Diary
Topic :: Security :: Cryptography
Topic :: Utilities
"""
setup(
author = AUTHOR,
author_email = EMAIL,
classifiers = [c for c in CLASSIFIERS.split('\n') if c],
description = DESCRIPTION,
download_url = '%s/Notable-%s.tar.gz' % (PYPI, app.version),
include_package_data = True,
name = NAME,
packages = ['notable'],
scripts = ['scripts/notable'],
test_suite='nose.collector',
url = URL,
version = app.version
)
|
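For context, a dependency-free sketch of what test_suite='nose.collector' arranges: test discovery at `setup.py test` time. The stdlib unittest loader below does the equivalent without nose; the 'test' directory name is an assumption for the sketch.
import unittest
def collect(start_dir="test"):
    # Discover test_*.py modules the same way a test_suite hook would.
    return unittest.TestLoader().discover(start_dir)
if __name__ == "__main__":
    unittest.TextTestRunner(verbosity=2).run(collect())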
934ed097d92728bde9e1fc42b11d688c2b512847
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8 >= 1.3'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
|
Make pep8 dependency more explicit
|
Make pep8 dependency more explicit
|
Python
|
mit
|
vauxoo-dev/autopep8,MeteorAdminz/autopep8,MeteorAdminz/autopep8,hhatto/autopep8,vauxoo-dev/autopep8,Vauxoo/autopep8,Vauxoo/autopep8,hhatto/autopep8,SG345/autopep8,SG345/autopep8
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
Make pep8 dependency more explicit
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8 >= 1.3'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
<commit_msg>Make pep8 dependency more explicit<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8 >= 1.3'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
Make pep8 dependency more explicit
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8 >= 1.3'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
<commit_msg>Make pep8 dependency more explicit<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
def version():
"""Return version string."""
with open('autopep8.py') as input_file:
for line in input_file:
if line.startswith('__version__'):
import ast
return ast.literal_eval(line.split('=')[1].strip())
with open('README.rst') as readme:
setup(
name='autopep8',
version=version(),
description='A tool that automatically formats Python code to conform '
'to the PEP 8 style guide',
long_description=readme.read(),
license='Expat License',
author='Hideo Hattori',
author_email='[email protected]',
url='https://github.com/hhatto/autopep8',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Unix Shell',
],
keywords='automation, pep8, format',
install_requires=['pep8 >= 1.3'],
test_suite='test.test_autopep8',
py_modules=['autopep8'],
zip_safe=False,
entry_points={'console_scripts': ['autopep8 = autopep8:main']},
)
|
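A quick, hedged illustration of what the '>= 1.3' specifier accepts, using the packaging library (an extra dependency assumed only for this sketch, not required by autopep8 itself).
from packaging.requirements import Requirement
from packaging.version import Version
req = Requirement("pep8 >= 1.3")
print(req.name, str(req.specifier))        # pep8 >=1.3
print(Version("1.3.1") in req.specifier)   # True  - satisfies the pin
print(Version("1.2") in req.specifier)     # False - too old, pip would upgrade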
f5206fa6cd94758202378b7616e578bd8a3a8dfe
|
tasks.py
|
tasks.py
|
"""Task functions for use with Invoke."""
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
|
"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
|
Use threads to allow simultaneous serving of site and building of assets
|
Use threads to allow simultaneous serving of site and building of assets
|
Python
|
mit
|
rlucioni/typesetter,rlucioni/typesetter,rlucioni/typesetter
|
"""Task functions for use with Invoke."""
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
Use threads to allow simultaneous serving of site and building of assets
|
"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
|
<commit_before>"""Task functions for use with Invoke."""
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
<commit_msg>Use threads to allow simultaneous serving of site and building of assets<commit_after>
|
"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
|
"""Task functions for use with Invoke."""
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
Use threads to allow simultaneous serving of site and building of assets"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
|
<commit_before>"""Task functions for use with Invoke."""
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def run(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
<commit_msg>Use threads to allow simultaneous serving of site and building of assets<commit_after>"""Task functions for use with Invoke."""
from threading import Thread
from invoke import task
@task
def clean(context):
cmd = '$(npm bin)/gulp clean'
context.run(cmd)
@task
def requirements(context):
steps = [
'pip install -r requirements.txt',
'npm install',
'$(npm bin)/bower install',
]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def serve(context, host='127.0.0.1', port='5000'):
steps = [
'open http://{host}:{port}/',
'FLASK_APP=typesetter/typesetter.py FLASK_DEBUG=1 flask run --host={host} --port={port}',
]
steps = [step.format(host=host, port=port) for step in steps]
cmd = ' && '.join(steps)
context.run(cmd)
@task
def static(context):
cmd = '$(npm bin)/gulp'
context.run(cmd)
@task
def stream(context, host=None):
tasks = [static, serve]
threads = [Thread(target=task, args=(context,), daemon=True) for task in tasks]
[t.start() for t in threads]
[t.join() for t in threads]
|
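A note on the pattern in the record above: each Invoke task is handed to a daemon thread and then joined, so asset building and the dev server run side by side. A standalone sketch of the same fan-out/join shape, with plain functions standing in for the Invoke tasks (all names below are hypothetical):
from threading import Thread
import time

def build_assets(label):
    time.sleep(0.1)                      # pretend to compile static assets
    print('assets built for', label)

def serve_site(label):
    time.sleep(0.1)                      # pretend to run a dev server
    print('site served for', label)

def stream(label='dev'):
    workers = [build_assets, serve_site]
    threads = [Thread(target=fn, args=(label,), daemon=True) for fn in workers]
    for t in threads:
        t.start()
    for t in threads:
        t.join()                         # wait for both workers to finish

if __name__ == '__main__':
    stream()
With the join() calls in place, daemon=True is mainly a safeguard: it lets the process exit on interrupt even if one of the workers hangs.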
0dec612eb764cd5cc236637e3fa7473a7c01f8de
|
run.py
|
run.py
|
#!/usr/bin/python
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
|
#!/usr/bin/python2
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
|
Set Python shebang to python2, since it fails on python3.
|
Set Python shebang to python2, since it fails on python3.
|
Python
|
mit
|
cubeOS/cubeOS-alpha
|
#!/usr/bin/python
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
Set Python shebang to python2, since it fails on python3.
|
#!/usr/bin/python2
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
|
<commit_before>#!/usr/bin/python
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
<commit_msg>Set Python shebang to python2, since it fails on python3.<commit_after>
|
#!/usr/bin/python2
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
|
#!/usr/bin/python
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
Set Python shebang to python2, since it fails on python3.#!/usr/bin/python2
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
|
<commit_before>#!/usr/bin/python
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
<commit_msg>Set Python shebang to python2, since it fails on python3.<commit_after>#!/usr/bin/python2
# This is a platform-independent script to automate building, compiling,
# and assembling cubeOS. Windows users must have das and dcpu-16 binaries
# in the cubeOS directory, or in their system PATH location
from subprocess import call
import build #Runs build.py
assembleStatus = call(["das","-o","cubeOS.bin","cubeOS.dasm16"])
if (assembleStatus==0):
runStatus = call(["dcpu-16","cubeOS.bin"])
print "dcpu-16 execution finished with status",runStatus
else:
print "**ASSEMBLY FAILED WITH STATUS ",assembleStatus,"**"
|
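Side note, hedged: the script above relies on Python 2 print statements, which is why the shebang is narrowed to python2. An alternative sketch keeps the generic shebang by importing the print function; the command names are taken from the record, everything else is illustrative:
#!/usr/bin/env python
from __future__ import print_function
from subprocess import call

assemble_status = call(['das', '-o', 'cubeOS.bin', 'cubeOS.dasm16'])
if assemble_status == 0:
    run_status = call(['dcpu-16', 'cubeOS.bin'])
    print('dcpu-16 execution finished with status', run_status)
else:
    print('**ASSEMBLY FAILED WITH STATUS**', assemble_status)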
6a5c9ccf0bd2582cf42577712309b8fd6e912966
|
blo/__init__.py
|
blo/__init__.py
|
import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH']
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR']
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE']
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
|
import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH'].replace('"', '')
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR'].replace('"', '')
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE'].replace('"', '')
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
|
Add replace double quotation mark from configuration file parameters.
|
Add replace double quotation mark from configuration file parameters.
|
Python
|
mit
|
10nin/blo,10nin/blo
|
import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH']
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR']
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE']
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
Add replace double quotation mark from configuration file parameters.
|
import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH'].replace('"', '')
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR'].replace('"', '')
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE'].replace('"', '')
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
|
<commit_before>import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH']
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR']
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE']
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
<commit_msg>Add replace double quotation mark from configuration file parameters.<commit_after>
|
import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH'].replace('"', '')
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR'].replace('"', '')
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE'].replace('"', '')
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
|
import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH']
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR']
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE']
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
Add replace double quotation mark from configuration file parameters.import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH'].replace('"', '')
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR'].replace('"', '')
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE'].replace('"', '')
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
|
<commit_before>import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH']
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR']
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE']
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
<commit_msg>Add replace double quotation mark from configuration file parameters.<commit_after>import configparser
from blo.BloArticle import BloArticle
from blo.DBControl import DBControl
class Blo:
def __init__(self, config_file_path):
config = configparser.ConfigParser()
config.read(config_file_path)
self._db_file_path = config['DB']['DB_PATH'].replace('"', '')
self._template_dir = config['TEMPLATE']['TEMPLATE_DIR'].replace('"', '')
self._default_template_file = config['TEMPLATE']['DEFAULT_TEMPLATE_FILE'].replace('"', '')
# create tables
self._db_control = DBControl(self._db_file_path)
self._db_control.create_tables()
self._db_control.close_connect()
def insert_article(self, file_path):
self._db_control = DBControl(self._db_file_path)
article = BloArticle(self._template_dir)
article.load_from_file(file_path)
self._db_control.insert_article(article, self._default_template_file)
self._db_control.close_connect()
|
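Worth noting about the record above: configparser keeps surrounding double quotes as part of the value, which is what the repeated .replace('"', '') calls work around. A self-contained sketch that centralizes the stripping in one helper — the helper name and the inline example values are made up, the section and key names follow the record:
import configparser

def read_setting(config, section, key):
    # Drop surrounding double quotes that configparser preserves verbatim.
    return config[section][key].strip('"')

config = configparser.ConfigParser()
config.read_string("""
[DB]
DB_PATH = "blo.sqlite"

[TEMPLATE]
TEMPLATE_DIR = "templates"
DEFAULT_TEMPLATE_FILE = "default.html"
""")

db_file_path = read_setting(config, 'DB', 'DB_PATH')
template_dir = read_setting(config, 'TEMPLATE', 'TEMPLATE_DIR')
default_template_file = read_setting(config, 'TEMPLATE', 'DEFAULT_TEMPLATE_FILE')
print(db_file_path, template_dir, default_template_file)
Unlike replace, strip only removes quotes at the ends of the value, which is usually the intent for quoted configuration entries.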
06b536db7ed82a68a3c1627769364b80dd85e259
|
alexandria/__init__.py
|
alexandria/__init__.py
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
return config.make_wsgi_app()
|
Make sure to return the wsgi app
|
Make sure to return the wsgi app
|
Python
|
isc
|
cdunklau/alexandria,cdunklau/alexandria,bertjwregeer/alexandria,cdunklau/alexandria,bertjwregeer/alexandria
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
Make sure to return the wsgi app
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
return config.make_wsgi_app()
|
<commit_before>import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
<commit_msg>Make sure to return the wsgi app<commit_after>
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
return config.make_wsgi_app()
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
Make sure to return the wsgi appimport logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
return config.make_wsgi_app()
|
<commit_before>import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
<commit_msg>Make sure to return the wsgi app<commit_after>import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
return config.make_wsgi_app()
|
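A short observation on the record above: the entire fix is the final return statement — without it the app factory yields None and the WSGI server has nothing to serve. A pared-down sketch of the same factory shape, with the routes and checks reduced to the essentials:
from pyramid.config import Configurator

def main(global_config, **settings):
    """Return a Pyramid WSGI application."""
    config = Configurator(settings=settings)
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.add_route('main', '/*traverse')
    return config.make_wsgi_app()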
1fffdb60aa4eb875bfbd961773d0cf5066dc38e2
|
django_website/views.py
|
django_website/views.py
|
"""
Misc. views.
"""
from __future__ import absolute_import
from django.contrib.comments.models import Comment
from django.contrib.sitemaps import views as sitemap_views
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import requires_csrf_token
from django.views.generic import list_detail
from .sitemaps import FlatPageSitemap, WeblogSitemap
@cache_page(60*60*6)
def sitemap(request):
return sitemap_views.sitemap(request, sitemaps={
'weblog': WeblogSitemap,
'flatpages': FlatPageSitemap,
})
def comments(request):
return list_detail.object_list(
request,
queryset = Comment.objects.filter(is_public=True).order_by('-submit_date'),
paginate_by = 30,
)
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
|
from django.shortcuts import render
from django.views.decorators.csrf import requires_csrf_token
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
|
Remove dead code. This isn't wired in any URLconf.
|
Remove dead code. This isn't wired in any URLconf.
|
Python
|
bsd-3-clause
|
nanuxbe/django,xavierdutreilh/djangoproject.com,vxvinh1511/djangoproject.com,rmoorman/djangoproject.com,gnarf/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,relekang/djangoproject.com,hassanabidpk/djangoproject.com,alawnchen/djangoproject.com,alawnchen/djangoproject.com,khkaminska/djangoproject.com,nanuxbe/django,nanuxbe/django,nanuxbe/django,hassanabidpk/djangoproject.com,gnarf/djangoproject.com,alawnchen/djangoproject.com,khkaminska/djangoproject.com,relekang/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,gnarf/djangoproject.com,alawnchen/djangoproject.com,xavierdutreilh/djangoproject.com,django/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,hassanabidpk/djangoproject.com,khkaminska/djangoproject.com,gnarf/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,vxvinh1511/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com
|
"""
Misc. views.
"""
from __future__ import absolute_import
from django.contrib.comments.models import Comment
from django.contrib.sitemaps import views as sitemap_views
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import requires_csrf_token
from django.views.generic import list_detail
from .sitemaps import FlatPageSitemap, WeblogSitemap
@cache_page(60*60*6)
def sitemap(request):
return sitemap_views.sitemap(request, sitemaps={
'weblog': WeblogSitemap,
'flatpages': FlatPageSitemap,
})
def comments(request):
return list_detail.object_list(
request,
queryset = Comment.objects.filter(is_public=True).order_by('-submit_date'),
paginate_by = 30,
)
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
Remove dead code. This isn't wired in any URLconf.
|
from django.shortcuts import render
from django.views.decorators.csrf import requires_csrf_token
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
|
<commit_before>"""
Misc. views.
"""
from __future__ import absolute_import
from django.contrib.comments.models import Comment
from django.contrib.sitemaps import views as sitemap_views
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import requires_csrf_token
from django.views.generic import list_detail
from .sitemaps import FlatPageSitemap, WeblogSitemap
@cache_page(60*60*6)
def sitemap(request):
return sitemap_views.sitemap(request, sitemaps={
'weblog': WeblogSitemap,
'flatpages': FlatPageSitemap,
})
def comments(request):
return list_detail.object_list(
request,
queryset = Comment.objects.filter(is_public=True).order_by('-submit_date'),
paginate_by = 30,
)
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
<commit_msg>Remove dead code. This isn't wired in any URLconf.<commit_after>
|
from django.shortcuts import render
from django.views.decorators.csrf import requires_csrf_token
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
|
"""
Misc. views.
"""
from __future__ import absolute_import
from django.contrib.comments.models import Comment
from django.contrib.sitemaps import views as sitemap_views
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import requires_csrf_token
from django.views.generic import list_detail
from .sitemaps import FlatPageSitemap, WeblogSitemap
@cache_page(60*60*6)
def sitemap(request):
return sitemap_views.sitemap(request, sitemaps={
'weblog': WeblogSitemap,
'flatpages': FlatPageSitemap,
})
def comments(request):
return list_detail.object_list(
request,
queryset = Comment.objects.filter(is_public=True).order_by('-submit_date'),
paginate_by = 30,
)
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
Remove dead code. This isn't wired in any URLconf.from django.shortcuts import render
from django.views.decorators.csrf import requires_csrf_token
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
|
<commit_before>"""
Misc. views.
"""
from __future__ import absolute_import
from django.contrib.comments.models import Comment
from django.contrib.sitemaps import views as sitemap_views
from django.shortcuts import render
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import requires_csrf_token
from django.views.generic import list_detail
from .sitemaps import FlatPageSitemap, WeblogSitemap
@cache_page(60*60*6)
def sitemap(request):
return sitemap_views.sitemap(request, sitemaps={
'weblog': WeblogSitemap,
'flatpages': FlatPageSitemap,
})
def comments(request):
return list_detail.object_list(
request,
queryset = Comment.objects.filter(is_public=True).order_by('-submit_date'),
paginate_by = 30,
)
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
<commit_msg>Remove dead code. This isn't wired in any URLconf.<commit_after>from django.shortcuts import render
from django.views.decorators.csrf import requires_csrf_token
@requires_csrf_token
def server_error(request, template_name='500.html'):
"""
Custom 500 error handler for static stuff.
"""
return render(request, template_name)
|
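One last aside on the record above: the surviving server_error view is only used if Django is told to call it, and that wiring normally lives in the root URLconf. A one-line sketch — the dotted path is an assumption based on the package name shown in this record:
# urls.py (root URLconf)
handler500 = 'django_website.views.server_error'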