| repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
mozilla/normandy
|
contract-tests/v3_api/test_group_update.py
|
1
|
1210
|
import uuid
from support.assertions import assert_valid_schema
from urllib.parse import urljoin
def test_group_update(conf, requests_session, headers):
# Create a new group
data = {"name": str(uuid.uuid4())}
response = requests_session.post(
urljoin(conf.getoption("server"), "/api/v3/group/"), headers=headers, data=data
)
assert response.status_code == 201
assert_valid_schema(response.json())
group_data = response.json()
group_id = group_data["id"]
# Verify group was stored and contains expected data
response = requests_session.get(
urljoin(conf.getoption("server"), "/api/v3/group/{}/".format(group_id)), headers=headers
)
group_data = response.json()
assert response.status_code == 200
assert_valid_schema(response.json())
# Use the update to change the name
updated_data = {"name": str(uuid.uuid4())}
response = requests_session.put(
urljoin(conf.getoption("server"), "/api/v3/group/{}/".format(group_id)),
headers=headers,
data=updated_data,
)
assert response.status_code == 200
assert_valid_schema(response.json())
assert response.json()["name"] == updated_data["name"]
|
mpl-2.0
|
mozilla/normandy
|
normandy/conftest.py
|
1
|
3099
|
from django.core.management import call_command
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
import pytest
import requests_mock
from graphene.test import Client as GrapheneClient
from rest_framework.test import APIClient
from normandy.schema import schema as normandy_schema
from normandy.base.tests import UserFactory
from normandy.recipes import geolocation as geolocation_module
from normandy.recipes.tests import fake_sign
@pytest.fixture
def api_client():
"""Fixture to provide a DRF API client."""
user = UserFactory(is_superuser=True)
client = APIClient()
client.force_authenticate(user=user)
return client
@pytest.fixture
def gql_client():
"""Fixture to provide a Graphene client."""
client = GrapheneClient(normandy_schema)
return client
@pytest.fixture
def geolocation():
"""Fixture to load geolocation data."""
geolocation_module.load_geoip_database()
if geolocation_module.geoip_reader is None:
pytest.skip()
else:
return geolocation_module
@pytest.fixture
def mocked_autograph(mocker):
mocked = mocker.patch("normandy.recipes.models.Autographer")
mocked.return_value.sign_data.side_effect = fake_sign
return mocked
@pytest.fixture
def mocked_remotesettings(mocker):
return mocker.patch("normandy.recipes.models.RemoteSettings")
@pytest.fixture
def rs_settings(settings):
settings.REMOTE_SETTINGS_URL = "https://remotesettings.example.com/v1"
settings.REMOTE_SETTINGS_USERNAME = "normandy"
settings.REMOTE_SETTINGS_PASSWORD = "n0rm4ndy"
return settings
@pytest.fixture()
def migrations(transactional_db):
"""
This fixture returns a helper object to test Django data migrations.
Based on: https://gist.github.com/bennylope/82a6088c02fefdd47e18f3c04ec167af
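Usage (an illustrative sketch; the app and migration names are hypothetical)::
def test_my_data_migration(migrations):
old_apps = migrations.migrate("myapp", "0001_initial")
OldModel = old_apps.get_model("myapp", "MyModel")
# ... create data against the old schema ...
new_apps = migrations.migrate("myapp", "0002_backfill")
# ... assert against the migrated state ...
migrations.reset()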
"""
class Migrator(object):
def migrate(self, app, to):
migration = [(app, to)]
executor = MigrationExecutor(connection)
executor.migrate(migration)
return executor.loader.project_state(migration).apps
def reset(self):
call_command("migrate", no_input=True)
return Migrator()
@pytest.fixture
def requestsmock():
"""Return a context where requests are all mocked.
Usage::
def test_something(requestsmock):
requestsmock.get(
'https://example.com/path',
content=b'The content'
)
# Do stuff that involves requests.get('https://example.com/path')
"""
with requests_mock.mock() as m:
yield m
@pytest.fixture
def storage(settings):
settings.DEFAULT_FILE_STORAGE = "normandy.base.storage.NormandyInMemoryStorage"
from django.core.files.storage import default_storage
yield default_storage
dirs_to_delete = ["/"]
while len(dirs_to_delete) > 0:
dir_path = dirs_to_delete.pop()
# Storage.listdir() returns (directories, files)
new_dirs, paths = default_storage.listdir(dir_path)
dirs_to_delete.extend(new_dirs)
for path in paths:
default_storage.delete(path)
|
mpl-2.0
|
mozilla/normandy
|
normandy/base/tests/test_checks.py
|
1
|
1449
|
import pytest
from django.core.checks.registry import run_checks
from normandy.base import checks as base_checks
from normandy.recipes import checks as recipe_checks, geolocation as geolocation_module
@pytest.mark.django_db
def test_run_checks_happy_path():
errors = set(e.id for e in run_checks())
expected = set()
# If geolocation isn't enabled, expect that there is a geolocation warning
geolocation_module.load_geoip_database()
if geolocation_module.geoip_reader is None:
expected.add(recipe_checks.ERROR_GEOIP_DB_NOT_AVAILABLE)
assert errors == expected
@pytest.mark.django_db
def test_run_checks_all_things_that_can_go_wrong(settings):
settings.CDN_URL = "http://cdn.example.com"
settings.APP_SERVER_URL = "http://app.example.com"
settings.OIDC_REMOTE_AUTH_HEADER = "Y_HTTP_HEADER"
settings.OIDC_LOGOUT_URL = None
settings.USE_OIDC = True
errors = run_checks()
assert errors != []
error_ids = [x.id for x in errors]
assert base_checks.ERROR_MISCONFIGURED_CDN_URL_SLASH in error_ids
assert base_checks.ERROR_MISCONFIGURED_CDN_URL_HTTPS in error_ids
assert base_checks.ERROR_MISCONFIGURED_APP_SERVER_URL_SLASH in error_ids
assert base_checks.ERROR_MISCONFIGURED_APP_SERVER_URL_HTTPS in error_ids
assert base_checks.ERROR_MISCONFIGURED_OIDC_LOGOUT_URL in error_ids
assert base_checks.WARNING_MISCONFIGURED_OIDC_REMOTE_AUTH_HEADER_PREFIX in error_ids
|
mpl-2.0
|
mozilla/normandy
|
normandy/base/tests/test_auth_backends.py
|
1
|
3238
|
import pytest
from django.test import RequestFactory
from normandy.base.auth_backends import (
INFO_LOGIN_SUCCESS,
LoggingModelBackend,
EmailOnlyRemoteUserBackend,
WARNING_LOGIN_FAILURE,
)
from normandy.base.tests import UserFactory, Whatever
class TestLoggingModelBackend(object):
@pytest.fixture
def mock_logger(self, mocker):
return mocker.patch("normandy.base.auth_backends.logger")
@pytest.fixture
def mock_authenticate(self, mocker):
return mocker.patch("normandy.base.auth_backends.ModelBackend.authenticate")
def test_log_failure_username(self, mock_logger, mock_authenticate):
mock_authenticate.return_value = None
request = RequestFactory().get("/")
user = LoggingModelBackend().authenticate(
request, username="fakeuser", password="does.not.exist"
)
assert user is None
mock_logger.warning.assert_called_with(
Whatever.contains("fakeuser"), extra={"code": WARNING_LOGIN_FAILURE}
)
def test_log_failure_no_username(self, mock_logger, mock_authenticate):
mock_authenticate.return_value = None
request = RequestFactory().get("/")
user = LoggingModelBackend().authenticate(request, password="does.not.exist")
assert user is None
mock_logger.warning.assert_called_with(
Whatever.contains("no username provided"), extra={"code": WARNING_LOGIN_FAILURE}
)
def test_log_success(self, mock_logger, mock_authenticate):
mock_authenticate.return_value = True
request = RequestFactory().get("/")
user = LoggingModelBackend().authenticate(
request, username="fakeuser", password="does.not.exist"
)
assert user
mock_logger.info.assert_called_with(
Whatever.contains("fakeuser"), extra={"code": INFO_LOGIN_SUCCESS}
)
@pytest.mark.django_db
class TestEmailOnlyRemoteUserBackend(object):
@pytest.fixture
def backend(self):
return EmailOnlyRemoteUserBackend()
def test_it_works(self, backend):
user = backend.authenticate(request=None, remote_user="[email protected]")
assert user is not None
assert not user.is_anonymous
def test_it_requires_an_email(self, backend):
user = backend.authenticate(request=None, remote_user="not_an_email")
assert user is None
def test_it_adds_an_email_to_the_user(self, backend):
email = "[email protected]"
user = backend.authenticate(request=None, remote_user=email)
assert user.email == email
def test_existing_user(self, backend):
email = "[email protected]"
existing_user = UserFactory(username=email, email=email)
logged_in_user = backend.authenticate(request=None, remote_user=email)
assert existing_user == logged_in_user
assert logged_in_user.email == email
def test_existing_user_no_email(self, backend):
email = "[email protected]"
existing_user = UserFactory(username=email, email="")
logged_in_user = backend.authenticate(request=None, remote_user=email)
assert existing_user == logged_in_user
assert logged_in_user.email == email
|
mpl-2.0
|
mozilla/normandy
|
normandy/recipes/tests/test_signing.py
|
1
|
17765
|
import base64
import os
from datetime import datetime, timedelta
from unittest.mock import MagicMock, call
from django.core.exceptions import ImproperlyConfigured
import pytest
import pytz
from pyasn1.type import useful as pyasn1_useful
from pyasn1_modules import rfc5280
from normandy.base.tests import Whatever
from normandy.recipes import signing
@pytest.fixture
def mock_logger(mocker):
return mocker.patch("normandy.recipes.signing.logger")
class TestAutographer(object):
test_settings = {
"URL": "https://autograph.example.com/",
"HAWK_ID": "hawk id",
"HAWK_SECRET_KEY": "hawk secret key",
}
def test_it_checks_settings(self, settings):
"""Test that each required key is required individually"""
# Leave out URL
settings.AUTOGRAPH_URL = None
settings.AUTOGRAPH_HAWK_ID = "hawk id"
settings.AUTOGRAPH_HAWK_SECRET_KEY = "hawk secret key"
with pytest.raises(ImproperlyConfigured) as exc:
signing.Autographer()
assert "AUTOGRAPH_URL" in str(exc.value)
# Leave out HAWK_ID
settings.AUTOGRAPH_URL = "https://autograph.example.com"
settings.AUTOGRAPH_HAWK_ID = None
settings.AUTOGRAPH_HAWK_SECRET_KEY = "hawk secret key"
with pytest.raises(ImproperlyConfigured) as exc:
signing.Autographer()
assert "AUTOGRAPH_HAWK_ID" in str(exc.value)
# Leave out HAWK_SECRET_KEY
settings.AUTOGRAPH_URL = "https://autograph.example.com"
settings.AUTOGRAPH_HAWK_ID = "hawk id"
settings.AUTOGRAPH_HAWK_SECRET_KEY = None
with pytest.raises(ImproperlyConfigured) as exc:
signing.Autographer()
assert "AUTOGRAPH_HAWK_SECRET_KEY" in str(exc.value)
# Include everything
settings.AUTOGRAPH_URL = "https://autograph.example.com"
settings.AUTOGRAPH_HAWK_ID = "hawk id"
settings.AUTOGRAPH_HAWK_SECRET_KEY = "hawk secret key"
# assert doesn't raise
signing.Autographer()
def test_it_interacts_with_autograph_correctly(self, settings, mock_logger):
settings.AUTOGRAPH_URL = "https://autograph.example.com"
settings.AUTOGRAPH_HAWK_ID = "hawk id"
settings.AUTOGRAPH_HAWK_SECRET_KEY = "hawk secret key"
autographer = signing.Autographer()
autographer.session = MagicMock()
autographer.session.post.return_value.json.return_value = [
{
"content-signature": (
'x5u="https://example.com/fake_x5u_1";p384ecdsa=fake_signature_1'
),
"x5u": "https://example.com/fake_x5u_1",
"hash_algorithm": "sha384",
"ref": "fake_ref_1",
"signature": "fake_signature_1",
},
{
"content-signature": (
'x5u="https://example.com/fake_x5u_2";p384ecdsa=fake_signature_2'
),
"x5u": "https://example.com/fake_x5u_2",
"hash_algorithm": "sha384",
"ref": "fake_ref_2",
"signature": "fake_signature_2",
},
]
url = self.test_settings["URL"] + "sign/data"
foo_base64 = base64.b64encode(b"foo").decode("utf8")
bar_base64 = base64.b64encode(b"bar").decode("utf8")
# Assert the correct data is returned
assert autographer.sign_data([b"foo", b"bar"]) == [
{
"timestamp": Whatever(),
"signature": "fake_signature_1",
"x5u": "https://example.com/fake_x5u_1",
},
{
"timestamp": Whatever(),
"signature": "fake_signature_2",
"x5u": "https://example.com/fake_x5u_2",
},
]
# Assert that logging happened
mock_logger.info.assert_has_calls(
[
call(Whatever.contains("2"), extra={"code": signing.INFO_RECEIVED_SIGNATURES}),
call(Whatever.contains("fake_ref_1")),
call(Whatever.contains("fake_ref_2")),
]
)
# Assert the correct request was made
autographer.session.post.assert_called_once_with(
url,
[
{"template": "content-signature", "input": foo_base64},
{"template": "content-signature", "input": bar_base64},
],
)
class TestVerifySignaturePubkey(object):
# known good data
data = '{"action":"console-log","arguments":{"message":"telemetry available"},"enabled":true,"filter_expression":"telemetry != undefined","id":1,"last_updated":"2017-01-02T11:32:07.687408Z","name":"mython\'s system addon test","revision_id":"6dc874ded7d14af9ef9c147c5d2ceef9d15b56ca933681e574cd96a50b75946e"}' # noqa
signature = "Prb0Jnb3icT0g_hZkgEyuzTlWrsTYrURXy6mzDTDh9WmqXdQBS05cV1mL1GUBbKIgcs02V8P1JvYR4LaJmSUvfZWYS66Jo8AdhcfEikoEZ0jvlLtiJDFxpT4eh2pk3XW" # noqa
pubkey = "MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEVEKiCAIkwRg1VFsP8JOYdSF6a3qvgbRPoEK9eTuLbrB6QixozscKR4iWJ8ZOOX6RPCRgFdfVDoZqjFBFNJN9QtRBk0mVtHbnErx64d2vMF0oWencS1hyLW2whgOgOz7p" # noqa
def test_known_good_signature(self):
assert signing.verify_signature_pubkey(self.data, self.signature, self.pubkey)
def test_raises_nice_error_for_too_short_signatures_bad_padding(self):
signature = "a_too_short_signature"
with pytest.raises(signing.WrongSignatureSize):
signing.verify_signature_pubkey(self.data, signature, self.pubkey)
def test_raises_nice_error_for_too_short_signatures_good_base64(self):
signature = "aa=="
with pytest.raises(signing.WrongSignatureSize):
signing.verify_signature_pubkey(self.data, signature, self.pubkey)
def test_raises_nice_error_for_wrong_signature(self):
# change the signature, but keep it a valid signature
signature = self.signature.replace("s", "S")
with pytest.raises(signing.SignatureDoesNotMatch):
signing.verify_signature_pubkey(self.data, signature, self.pubkey)
class TestVerifySignatureX5U(object):
def test_happy_path(self, mocker):
mock_verify_x5u = mocker.patch("normandy.recipes.signing.verify_x5u")
mock_der_encode = mocker.patch("normandy.recipes.signing.der_encode")
mock_verify_signature_pubkey = mocker.patch(
"normandy.recipes.signing.verify_signature_pubkey"
)
data = "abc"
signature = "signature"
x5u = "http://example.com/cert"
cert_contents = b"cert_contents"
encoded_cert_contents = base64.b64encode(cert_contents).decode()
mock_der_encode.return_value = cert_contents
public_key = "public_key"
cert = {"tbsCertificate": {"subjectPublicKeyInfo": public_key}}
mock_verify_x5u.return_value = cert
ret = signing.verify_signature_x5u(data, signature, x5u)
mock_verify_x5u.assert_called_with(x5u)
mock_der_encode.assert_called_with(public_key)
mock_verify_signature_pubkey.assert_called_with(data, signature, encoded_cert_contents)
assert ret == mock_verify_signature_pubkey.return_value
class TestExtractCertsFromPem(object):
def test_empty(self):
assert signing.extract_certs_from_pem("") == []
def test_real_certs(self):
path = os.path.join(os.path.dirname(__file__), "data", "test_certs.pem")
with open(path) as f:
certs = signing.extract_certs_from_pem(f.read())
assert len(certs) == 3
def test_incomplete_cert(self):
bad_data = "-----BEGIN CERTIFICATE-----\nMIIGXTCCBEWgAwIBAgIEAQAACjANBgkq"
with pytest.raises(signing.CertificateParseError) as exc:
signing.extract_certs_from_pem(bad_data)
assert "Unexpected end of input." in str(exc.value)
def test_not_a_cert(self):
bad_data = "hello world"
with pytest.raises(signing.CertificateParseError) as exc:
signing.extract_certs_from_pem(bad_data)
assert 'Unexpected input "hello world"' in str(exc.value)
class TestParseCertsFromDer(object):
def test_real_certs(self):
path = os.path.join(os.path.dirname(__file__), "data", "test_certs.pem")
with open(path) as f:
ders = signing.extract_certs_from_pem(f.read())
certs = [signing.parse_cert_from_der(der) for der in ders]
# Quick spot check on the CommonName value of the subjects of the certs
# If these are correct, the entire objects were probably parsed correctly
assert (
certs[0]["tbsCertificate"]["subject"]["rdnSequence"][4][0]["value"]
== b"\x13&normandy.content-signature.mozilla.org"
)
assert (
certs[1]["tbsCertificate"]["subject"]["rdnSequence"][3][0]["value"]
== b"\x13'Mozilla Signing Services Intermediate 1"
)
assert (
certs[2]["tbsCertificate"]["subject"]["rdnSequence"][3][0]["value"]
== b"\x13\x16root-ca-production-amo"
)
class TestCheckValidity(object):
def test_it_works(self):
now = datetime.utcnow().replace(tzinfo=pytz.utc)
not_before = now - timedelta(days=1)
not_after = now + timedelta(days=1)
assert signing.check_validity(not_before, not_after, None)
def test_not_yet_valid(self):
now = datetime.utcnow().replace(tzinfo=pytz.utc)
not_before = now + timedelta(days=1)
not_after = now + timedelta(days=2)
with pytest.raises(signing.CertificateNotYetValid):
signing.check_validity(not_before, not_after, None)
def test_expired(self):
now = datetime.utcnow().replace(tzinfo=pytz.utc)
not_before = now - timedelta(days=2)
not_after = now - timedelta(days=1)
with pytest.raises(signing.CertificateExpired):
signing.check_validity(not_before, not_after, None)
def test_expiring_early_ok(self):
now = datetime.utcnow().replace(tzinfo=pytz.utc)
not_before = now - timedelta(days=1)
not_after = now + timedelta(days=3)
expire_early = timedelta(days=2)
assert signing.check_validity(not_before, not_after, expire_early)
def test_expiring_early_not_ok(self):
now = datetime.utcnow().replace(tzinfo=pytz.utc)
not_before = now - timedelta(days=1)
not_after = now + timedelta(days=1)
expire_early = timedelta(days=2)
with pytest.raises(signing.CertificateExpiringSoon):
signing.check_validity(not_before, not_after, expire_early)
class TestVerifyX5u(object):
def _fake_cert(self, not_before=None, not_after=None):
fake_cert = rfc5280.Certificate()
fake_cert["tbsCertificate"] = rfc5280.TBSCertificate()
fake_cert["tbsCertificate"]["validity"] = rfc5280.Validity()
if not_before:
fake_cert["tbsCertificate"]["validity"]["notBefore"] = rfc5280.Time()
fake_cert["tbsCertificate"]["validity"]["notBefore"][
"utcTime"
] = pyasn1_useful.UTCTime.fromDateTime(not_before)
if not_after:
fake_cert["tbsCertificate"]["validity"]["notAfter"] = rfc5280.Time()
fake_cert["tbsCertificate"]["validity"]["notAfter"][
"utcTime"
] = pyasn1_useful.UTCTime.fromDateTime(not_after)
return fake_cert
def test_it_works(self, mocker, settings):
settings.CERTIFICATES_CHECK_VALIDITY = True
settings.CERTIFICATES_EXPECTED_ROOT_HASH = None
settings.CERTIFICATES_EXPECTED_SUBJECT_CN = None
settings.X5U_CACHE_TIME = 0 # don't cache, since mocks can't be cached
mock_requests = mocker.patch("normandy.recipes.signing.requests")
mock_extract_certs_from_pem = mocker.patch(
"normandy.recipes.signing.extract_certs_from_pem"
)
mock_parse_cert_from_der = mocker.patch("normandy.recipes.signing.parse_cert_from_der")
url = "https://example.com/cert.pem"
now = datetime.now()
not_before = now - timedelta(days=1)
not_after = now + timedelta(days=1)
mock_extract_certs_from_pem.return_value = ["a", "b"]
mock_parse_cert_from_der.return_value = self._fake_cert(
not_before=not_before, not_after=not_after
)
assert signing.verify_x5u(url) == mock_parse_cert_from_der.return_value
mock_requests.get.assert_called_once_with(url)
body = mock_requests.get.return_value.content.decode.return_value
mock_extract_certs_from_pem.assert_called_once_with(body)
assert mock_parse_cert_from_der.call_count == 2
def test_invalid_dates(self, mocker, settings):
settings.CERTIFICATES_CHECK_VALIDITY = True
settings.CERTIFICATES_EXPECTED_ROOT_HASH = None
settings.CERTIFICATES_EXPECTED_SUBJECT_CN = None
settings.X5U_CACHE_TIME = 0 # don't cache, since mocks can't be cached
mock_requests = mocker.patch("normandy.recipes.signing.requests")
mock_extract_certs_from_pem = mocker.patch(
"normandy.recipes.signing.extract_certs_from_pem"
)
mock_parse_cert_from_der = mocker.patch("normandy.recipes.signing.parse_cert_from_der")
url = "https://example.com/cert.pem"
now = datetime.now().replace(tzinfo=pytz.UTC)
not_before = now - timedelta(days=2)
not_after = now - timedelta(days=1)
mock_extract_certs_from_pem.return_value = ["a"]
mock_parse_cert_from_der.return_value = self._fake_cert(
not_before=not_before, not_after=not_after
)
with pytest.raises(signing.CertificateExpired):
signing.verify_x5u(url)
mock_requests.get.assert_called_once_with(url)
body = mock_requests.get.return_value.content.decode.return_value
mock_extract_certs_from_pem.assert_called_once_with(body)
mock_parse_cert_from_der.assert_called_once_with(
mock_extract_certs_from_pem.return_value[0]
)
def test_mixed_timestamp_format(self, mocker):
# The certificate used for testing expired on 2018-04-24. This test is
# only concerned with the parsing of the dates, so mock the call to the
# validate function and assert about the values of the dates.
mock_requests = mocker.patch("normandy.recipes.signing.requests")
mock_check_validity = mocker.patch("normandy.recipes.signing.check_validity")
path = os.path.join(os.path.dirname(__file__), "data", "mixed_timestamps_certs.pem")
with open(path, "rb") as f:
mock_requests.get.return_value.content = f.read()
assert signing.verify_x5u("https://example.com/cert.pem")
assert mock_check_validity.mock_calls == [
call(
datetime(2017, 12, 25, tzinfo=pytz.UTC),
datetime(2018, 4, 24, tzinfo=pytz.UTC),
None,
),
call(
datetime(2017, 5, 4, 0, 12, 39, tzinfo=pytz.UTC),
datetime(2019, 5, 4, 0, 12, 39, tzinfo=pytz.UTC),
None,
),
call(
datetime(2015, 3, 17, 22, 53, 57, tzinfo=pytz.UTC),
datetime(2025, 3, 14, 22, 53, 57, tzinfo=pytz.UTC),
None,
),
]
def test_it_checks_cert_root(self, mocker, settings):
path = os.path.join(os.path.dirname(__file__), "data", "test_certs.pem")
with open(path) as f:
cert_pem = f.read()
settings.CERTIFICATES_CHECK_VALIDITY = False
settings.CERTIFICATES_EXPECTED_ROOT_HASH = "CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF:EE:CO:FF"
settings.CERTIFICATES_EXPECTED_SUBJECT_CN = None
mock_requests = mocker.patch("normandy.recipes.signing.requests")
mock_requests.get.return_value.content.decode.return_value = cert_pem
with pytest.raises(signing.CertificateHasWrongRoot):
signing.verify_x5u("https://example.com/cert.pem")
def test_it_checks_cert_subject(self, mocker, settings):
path = os.path.join(os.path.dirname(__file__), "data", "test_certs.pem")
with open(path) as f:
cert_pem = f.read()
settings.CERTIFICATES_CHECK_VALIDITY = False
settings.CERTIFICATES_EXPECTED_ROOT_HASH = None
settings.CERTIFICATES_EXPECTED_SUBJECT_CN = "wrong.subject.example.com"
mock_requests = mocker.patch("normandy.recipes.signing.requests")
mock_requests.get.return_value.content.decode.return_value = cert_pem
with pytest.raises(signing.CertificateHasWrongSubject):
signing.verify_x5u("https://example.com/cert.pem")
class TestReadTimestampObject(object):
def test_it_reads_utc_time_format(self):
dt = datetime(2018, 1, 25, 16, 1, 13, 0, tzinfo=pytz.UTC)
obj = rfc5280.Time()
obj["utcTime"] = pyasn1_useful.UTCTime.fromDateTime(dt)
assert signing.read_timestamp_object(obj) == dt
def test_it_reads_general_time_format(self):
dt = datetime(2018, 1, 25, 16, 1, 13, 0, tzinfo=pytz.UTC)
obj = rfc5280.Time()
obj["generalTime"] = pyasn1_useful.GeneralizedTime.fromDateTime(dt)
assert signing.read_timestamp_object(obj) == dt
def test_it_errors_on_unsupported_formats(self):
with pytest.raises(signing.BadCertificate) as exc:
signing.read_timestamp_object({"unsupportedTimestamp": b"gibberish"})
assert "Timestamp not in expected format" in str(exc.value)
assert "unsupportedTimestamp" in str(exc.value)
|
mpl-2.0
|
mozilla/normandy
|
normandy/recipes/migrations/0005_auto_20180503_2146.py
|
1
|
2487
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 21:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [("recipes", "0004_auto_20180502_2340")]
operations = [
migrations.RemoveField(model_name="approvalrequest", name="revision"),
migrations.RemoveField(model_name="recipe", name="approved_revision"),
migrations.RemoveField(model_name="recipe", name="latest_revision"),
migrations.DeleteModel(name="RecipeRevision"),
migrations.RenameModel("TmpRecipeRevision", "RecipeRevision"),
migrations.AlterField(
model_name="reciperevision",
name="action",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="recipe_revisions",
to="recipes.Action",
),
),
migrations.AlterField(
model_name="reciperevision",
name="recipe",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="revisions",
to="recipes.Recipe",
),
),
migrations.AlterField(
model_name="reciperevision",
name="user",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="recipe_revisions",
to=settings.AUTH_USER_MODEL,
),
),
migrations.RenameField(
model_name="approvalrequest", old_name="tmp_revision", new_name="revision"
),
migrations.RenameField(
model_name="recipe", old_name="approved_tmp_revision", new_name="approved_revision"
),
migrations.RenameField(
model_name="recipe", old_name="latest_tmp_revision", new_name="latest_revision"
),
migrations.AlterField(
model_name="approvalrequest",
name="revision",
field=models.OneToOneField(
default=None,
on_delete=django.db.models.deletion.CASCADE,
related_name="approval_request",
to="recipes.RecipeRevision",
),
preserve_default=False,
),
]
|
mpl-2.0
|
mozilla/normandy
|
normandy/base/tests/test_utils.py
|
1
|
2452
|
import json
from normandy.base.utils import canonical_json_dumps, get_client_ip, sri_hash
class TestGetClientIp(object):
def test_no_proxies(self, rf, settings):
"""If there are no proxies, REMOTE_ADDR should be used."""
settings.NUM_PROXIES = 0
client_ip = "1.1.1.1"
req = rf.get("/", HTTP_X_FORWARDED_FOR="fake", REMOTE_ADDR=client_ip)
assert get_client_ip(req) == client_ip
def test_one_proxy(self, rf, settings):
"""
If there is one proxy, the right-most value in HTTP_X_FORWARDED_FOR
should be used.
"""
settings.NUM_PROXIES = 1
client_ip = "1.1.1.1"
nginx_ip = "2.2.2.2"
forwarded_for = ", ".join(["fake", client_ip])
req = rf.get("/", HTTP_X_FORWARDED_FOR=forwarded_for, REMOTE_ADDR=nginx_ip)
assert get_client_ip(req) == client_ip
def test_two_proxies(self, rf, settings):
"""
If there are two proxies, the second-from-the-right value in
HTTP_X_FORWARDED_FOR should be used.
"""
settings.NUM_PROXIES = 2
client_ip = "1.1.1.1"
elb_ip = "2.2.2.2"
nginx_ip = "3.3.3.3"
forwarded_for = ", ".join(["fake", client_ip, elb_ip])
req = rf.get("/", HTTP_X_FORWARDED_FOR=forwarded_for, REMOTE_ADDR=nginx_ip)
assert get_client_ip(req) == client_ip
class TestCanonicalJsonDumps(object):
def test_it_works(self):
data = {"a": 1, "b": 2}
assert canonical_json_dumps(data) == '{"a":1,"b":2}'
def test_it_works_with_euro_signs(self):
data = {"USD": "$", "EURO": "€"}
assert canonical_json_dumps(data) == r'{"EURO":"\u20ac","USD":"$"}'
def test_it_escapes_quotes_properly(self):
data = {"message": 'It "works", I think'}
dumped = canonical_json_dumps(data)
assert dumped == r'{"message":"It \"works\", I think"}'
json.loads(dumped)
class TestSRIHash(object):
def test_it_works(self):
# Pre-generated base64 hash of the string "foobar"
expected = "sha384-PJww2fZl501RXIQpYNSkUcg6ASX9Pec5LXs3IxrxDHLqWK7fzfiaV2W/kCr5Ps8G"
assert sri_hash(b"foobar") == expected
def test_url_safe_works(self):
# Pre-generated base64 hash of the string "normandy", urlsafe-ed
expected = "sha384-6FydcL0iVnTqXT3rBg6YTrlz0K-mw57n9zxTEmxYG6FIO_vZTMlTWsbkxHchsO65"
assert sri_hash(b"normandy", url_safe=True) == expected
|
mpl-2.0
|
mozilla/normandy
|
contract-tests/v3_api/test_approval_request_close.py
|
1
|
1496
|
from support.assertions import assert_valid_schema
from support.helpers import new_recipe
from urllib.parse import urljoin
def test_approval_request_close(conf, requests_session, headers):
# Get an action we can work with
action_response = requests_session.get(
urljoin(conf.getoption("server"), "/api/v3/action/"), headers=headers
)
data = action_response.json()
action_id = data["results"][0]["id"]
# Create a recipe
recipe_details = new_recipe(requests_session, action_id, conf.getoption("server"), headers)
# Create an approval request
response = requests_session.post(
urljoin(
conf.getoption("server"),
"/api/v3/recipe_revision/{}/request_approval/".format(
recipe_details["latest_revision_id"]
),
),
headers=headers,
)
data = response.json()
approval_id = data["id"]
assert response.status_code != 404
assert_valid_schema(response.json())
# Close the approval request
response = requests_session.post(
urljoin(
conf.getoption("server"), "/api/v3/approval_request/{}/close/".format(approval_id)
),
headers=headers,
)
assert response.status_code == 204
# Verify that it no longer exists
response = requests_session.get(
urljoin(conf.getoption("server"), "/api/v3/approval_request/{}/".format(approval_id)),
headers=headers,
)
assert response.status_code == 404
|
mpl-2.0
|
mozilla/normandy
|
normandy/recipes/migrations/0014_auto_20190228_1128.py
|
1
|
2576
|
# Generated by Django 2.0.13 on 2019-02-28 11:28
import json
import sys
from urllib.parse import unquote_plus, urlparse
from django.db import migrations
def get_filename_from_url(url):
return unquote_plus(urlparse(url).path.split("/")[-1])
def add_extension_id(apps, schema_editor):
Action = apps.get_model("recipes", "Action")
RecipeRevision = apps.get_model("recipes", "RecipeRevision")
Extension = apps.get_model("studies", "Extension")
failures = []
try:
action = Action.objects.get(name="opt-out-study")
except Action.DoesNotExist:
return # Do nothing since there cannot be any recipes using the opt-out-study action
revisions = RecipeRevision.objects.filter(action_id=action.id)
for revision in revisions:
arguments = json.loads(revision.arguments_json)
url = arguments.get("addonUrl")
filename = get_filename_from_url(url)
try:
extension = Extension.objects.get(xpi=f"extensions/{filename}")
except Extension.DoesNotExist:
failures.append(
{
"filename": filename,
"addon_url": arguments.get("addonUrl"),
"revision_id": revision.id,
"recipe_id": revision.recipe.id,
}
)
else:
arguments["extensionApiId"] = extension.id
revision.arguments_json = json.dumps(arguments)
revision.save()
if failures:
for failure in failures:
sys.stderr.write(f"{failure}\n")
raise Exception("There were failures in this migration.")
def remove_extension_id(apps, schema_editor):
Action = apps.get_model("recipes", "Action")
RecipeRevision = apps.get_model("recipes", "RecipeRevision")
try:
action = Action.objects.get(name="opt-out-study")
except Action.DoesNotExist:
return # Do nothing since there cannot be any recipes using the opt-out-study action
revisions = RecipeRevision.objects.filter(action_id=action.id)
for revision in revisions:
arguments = json.loads(revision.arguments_json)
if "extensionApiId" in arguments:
arguments.pop("extensionApiId")
revision.arguments_json = json.dumps(arguments)
revision.save()
class Migration(migrations.Migration):
dependencies = [
("recipes", "0013_auto_20181018_2049"),
("studies", "0006_extension_hash_algorithm"),
]
operations = [migrations.RunPython(add_extension_id, remove_extension_id)]
|
mpl-2.0
|
mozilla/normandy
|
normandy/recipes/tests/test_schema.py
|
1
|
2480
|
import pytest
from normandy.base.tests import GQ
from normandy.recipes.tests import ActionFactory, ApprovalRequestFactory, RecipeFactory
@pytest.mark.django_db
class TestQuery(object):
def test_resolve_all_action(self, gql_client):
a = ActionFactory()
res = gql_client.execute(GQ().query.allActions.fields("id"))
assert res == {"data": {"allActions": [{"id": str(a.id)}]}}
def test_resolve_action_by_id(self, gql_client):
a = ActionFactory()
res = gql_client.execute(GQ().query.action(id=a.id).fields("name"))
assert res == {"data": {"action": {"name": a.name}}}
def test_resolve_action_by_name(self, gql_client):
a = ActionFactory()
res = gql_client.execute(GQ().query.action(name=a.name).fields("id"))
assert res == {"data": {"action": {"id": str(a.id)}}}
def test_resolve_all_approval_requests(self, gql_client):
a = ApprovalRequestFactory()
res = gql_client.execute(GQ().query.allApprovalRequests.fields("id"))
assert res == {"data": {"allApprovalRequests": [{"id": str(a.id)}]}}
def test_resolve_approval_request_by_id(self, gql_client):
a = ApprovalRequestFactory()
res = gql_client.execute(
GQ().query.approvalRequest(id=a.id).fields(GQ().revision.fields("id"))
)
assert res == {"data": {"approvalRequest": {"revision": {"id": str(a.revision.id)}}}}
def test_resolve_all_recipes(self, gql_client):
r = RecipeFactory()
res = gql_client.execute(GQ().query.allRecipes.fields("id"))
assert res == {"data": {"allRecipes": [{"id": str(r.id)}]}}
def test_resolve_recipe_by_id(self, gql_client):
r = RecipeFactory()
res = gql_client.execute(
GQ().query.recipe(id=r.id).fields(GQ().latestRevision.fields("id"))
)
assert res == {"data": {"recipe": {"latestRevision": {"id": str(r.latest_revision.id)}}}}
def test_resolve_all_recipe_revisions(self, gql_client):
r = RecipeFactory()
res = gql_client.execute(GQ().query.allRecipeRevisions.fields("id"))
assert res == {"data": {"allRecipeRevisions": [{"id": str(r.latest_revision.id)}]}}
def test_resolve_recipe_revision_by_id(self, gql_client):
r = RecipeFactory()
res = gql_client.execute(GQ().query.recipeRevision(id=r.latest_revision.id).fields("id"))
assert res == {"data": {"recipeRevision": {"id": str(r.latest_revision.id)}}}
|
mpl-2.0
|
developmentseed/landsat-util
|
setup.py
|
1
|
1158
|
#!/usr/bin/env python
# Landsat Util
# License: CC0 1.0 Universal
try:
from setuptools import setup
setup_kwargs = {'entry_points': {'console_scripts':['landsat=landsat.landsat:__main__']}}
except ImportError:
from distutils.core import setup
setup_kwargs = {'scripts': ['bin/landsat']}
from landsat import __version__
def readme():
with open('README.rst') as f:
return f.read()
with open('requirements.txt') as fid:
INSTALL_REQUIRES = [l.strip() for l in fid if l.strip()]
with open('requirements-dev.txt') as fid:
TEST_REQUIRES = [l.strip() for l in fid if l.strip()]
setup(
name='landsat-util',
version=__version__,
description='A utility to search, download and process Landsat 8' +
' satellite imagery',
long_description=readme(),
author='Development Seed',
author_email='[email protected]',
url='https://github.com/developmentseed/landsat-util',
packages=['landsat'],
include_package_data=True,
license='CC0 1.0 Universal',
platforms='Posix; MacOS X; Windows',
install_requires=INSTALL_REQUIRES,
tests_require=TEST_REQUIRES,
**setup_kwargs
)
|
cc0-1.0
|
developmentseed/landsat-util
|
landsat/mixins.py
|
3
|
2950
|
# Pansharpened Image Process using Rasterio
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
import sys
import subprocess
from termcolor import colored
class VerbosityMixin(object):
"""
Verbosity Mixin that generates beautiful stdout outputs.
"""
verbose = False
def output(self, value, normal=False, color=None, error=False,
arrow=False, indent=None):
""" Handles verbosity of this calls.
if priority is set to 1, the value is printed
if class instance verbose is True, the value is printed
:param value:
a string representing the message to be printed
:type value:
String
:param normal:
if set to true the message is always printed, otherwise it is only shown if verbosity is set
:type normal:
boolean
:param color:
The color of the message, choices: 'red', 'green', 'blue'
:type color:
String
:param error:
if set to true the message appears in red
:type error:
Boolean
:param arrow:
if set to true an arrow appears before the message
:type arrow:
Boolean
:param indent:
indents the message based on the number provided
:type indent:
Integer
:returns:
void
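Example (illustrative)::
self.output("Download complete", normal=True, arrow=True, color="green")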
"""
if error and value and (normal or self.verbose):
return self._print(value, color='red', indent=indent)
if self.verbose or normal:
return self._print(value, color, arrow, indent)
return
def subprocess(self, argv):
"""
Execute subprocess commands with proper output.
This is no longer used in landsat-util
:param argv:
A list of subprocess arguments
:type argv:
List
:returns:
void
"""
if self.verbose:
proc = subprocess.Popen(argv, stderr=subprocess.PIPE)
else:
proc = subprocess.Popen(argv, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.output(proc.stderr.read(), error=True)
return
def exit(self, message):
""" outputs an exit message and exits
:param message:
The message to be output
:type message:
String
:returns:
void
"""
self.output(message, normal=True, color="green")
sys.exit()
def _print(self, msg, color=None, arrow=False, indent=None):
""" Print the msg with the color provided. """
if color:
msg = colored(msg, color)
if arrow:
msg = colored('===> ', 'blue') + msg
if indent:
msg = (' ' * indent) + msg
print(msg)
return msg
|
cc0-1.0
|
rmmh/skybot
|
plugins/seen.py
|
3
|
3063
|
" seen.py: written by sklnd in about two beers July 2009"
from builtins import object
import time
import unittest
from util import hook, timesince
def db_init(db):
"check to see that our db has the the seen table and return a connection."
db.execute(
"create table if not exists seen(name, time, quote, chan, "
"primary key(name, chan))"
)
db.commit()
@hook.singlethread
@hook.event("PRIVMSG", ignorebots=False)
def seeninput(paraml, input=None, db=None, bot=None):
db_init(db)
db.execute(
"insert or replace into seen(name, time, quote, chan)" "values(?,?,?,?)",
(input.nick.lower(), time.time(), input.msg, input.chan),
)
db.commit()
@hook.command
def seen(inp, nick="", chan="", db=None, input=None):
".seen <nick> -- Tell when a nickname was last in active in irc"
inp = inp.lower()
if input.conn.nick.lower() == inp:
# user is looking for us, being a smartass
return "You need to get your eyes checked."
if inp == nick.lower():
return "Have you looked in a mirror lately?"
db_init(db)
last_seen = db.execute(
"select name, time, quote from seen where" " name = ? and chan = ?", (inp, chan)
).fetchone()
if last_seen:
reltime = timesince.timesince(last_seen[1])
if last_seen[2][0:1] == "\x01":
return "%s was last seen %s ago: *%s %s*" % (
inp,
reltime,
inp,
last_seen[2][8:-1],
)
else:
return "%s was last seen %s ago saying: %s" % (inp, reltime, last_seen[2])
else:
return "I've never seen %s" % inp
class SeenTest(unittest.TestCase):
class Mock(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def setUp(self):
import sqlite3
self.db = sqlite3.connect(":memory:")
def seeninput(self, nick, msg, chan="#test"):
seeninput(None, db=self.db, input=self.Mock(nick=nick, msg=msg, chan=chan))
def seen(self, inp, nick="bob", chan="#test", bot_nick="skybot"):
return seen(
inp,
nick=nick,
chan=chan,
db=self.db,
input=self.Mock(conn=self.Mock(nick=bot_nick)),
)
def test_missing(self):
assert "I've never seen nemo" in self.seen("NEMO")
def test_seen(self):
self.seeninput("nemo", "witty banter")
assert "nemo was last seen" in self.seen("nemo")
assert "witty banter" in self.seen("nemo")
def test_seen_missing_channel(self):
self.seeninput("nemo", "msg", chan="#secret")
assert "never seen" in self.seen("nemo")
def test_seen_ctcp(self):
self.seeninput("nemo", "\x01ACTION test lol\x01")
assert self.seen("nemo").endswith("ago: *nemo test lol*")
def test_snark_eyes(self):
assert "eyes checked" in self.seen("skybot", bot_nick="skybot")
def test_snark_mirror(self):
assert "mirror" in self.seen("bob", nick="bob")
|
unlicense
|
rmmh/skybot
|
test/plugins/test_choose.py
|
3
|
1458
|
from unittest import TestCase
from mock import patch
from choose import choose
class TestChoose(TestCase):
def test_choose_one_choice(self):
expected = "the decision is up to you"
actual = choose("foo")
assert expected == actual
def test_choose_same_thing(self):
expected = "foo"
actual = choose("foo, foo, foo")
assert expected == actual
def test_choose_two_choices(self):
actual = choose("foo, bar")
assert actual in ["foo", "bar"]
def test_choose_choices_space(self):
expected_values = ["foo", "bar"]
actual = choose("foo bar")
assert actual in expected_values
def test_choose_strips_whitespace(self):
expected_values = ["foo", "bar"]
actual = choose(" foo ," " bar ")
assert actual in expected_values
@patch("random.choice")
def test_choose_end_comma_behavior(self, mock_random_choice):
mock_random_choice.side_effect = lambda arr: arr[0]
expected = "the decision is up to you"
actual = choose("foo,")
assert actual == expected
@patch("random.choice")
def test_choose_collapse_commas(self, mock_random_choice):
# Should never be an empty string here
mock_random_choice.side_effect = lambda arr: arr[1]
expected = "bar"
actual = choose("foo,,bar")
assert actual == expected
|
unlicense
|
rmmh/skybot
|
core/irc.py
|
3
|
10652
|
from __future__ import print_function
from builtins import map
from builtins import object
import re
import socket
import time
import _thread
import queue
from ssl import wrap_socket, CERT_NONE, CERT_REQUIRED, SSLError
DEFAULT_NAME = "skybot"
DEFAULT_REALNAME = "Python bot - http://github.com/rmmh/skybot"
DEFAULT_NICKSERV_NAME = "nickserv"
DEFAULT_NICKSERV_COMMAND = "IDENTIFY %s"
def decode(txt):
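# Try a few common encodings in order; fall back to lossy utf-8 decoding.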
for codec in ("utf-8", "iso-8859-1", "shift_jis", "cp1252"):
try:
return txt.decode(codec)
except UnicodeDecodeError:
continue
return txt.decode("utf-8", "ignore")
def censor(text, censored_strings=None):
text = re.sub("[\n\r]+", " ", text)
if not censored_strings:
return text
words = map(re.escape, censored_strings)
pattern = "(%s)" % "|".join(words)
text = re.sub(pattern, "[censored]", text)
return text
class crlf_tcp(object):
"Handles tcp connections that consist of utf-8 lines ending with crlf"
def __init__(self, host, port, timeout=300):
self.ibuffer = b""
self.obuffer = b""
self.oqueue = queue.Queue() # lines to be sent out
self.iqueue = queue.Queue() # lines that were received
self.socket = self.create_socket()
self.host = host
self.port = port
self.timeout = timeout
def create_socket(self):
return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def run(self):
while True:
try:
self.socket.connect((self.host, self.port))
except socket.timeout:
print("timed out connecting to %s:%s" % (self.host, self.port))
time.sleep(60)
else:
break
_thread.start_new_thread(self.recv_loop, ())
_thread.start_new_thread(self.send_loop, ())
def recv_from_socket(self, nbytes):
return self.socket.recv(nbytes)
def get_timeout_exception_type(self):
return socket.timeout
def handle_receive_exception(self, error, last_timestamp):
if time.time() - last_timestamp > self.timeout:
self.iqueue.put(StopIteration)
self.socket.close()
return True
return False
def recv_loop(self):
last_timestamp = time.time()
while True:
try:
data = self.recv_from_socket(4096)
self.ibuffer += data
if data:
last_timestamp = time.time()
else:
if time.time() - last_timestamp > self.timeout:
self.iqueue.put(StopIteration)
self.socket.close()
return
time.sleep(1)
except (self.get_timeout_exception_type(), socket.error) as e:
if self.handle_receive_exception(e, last_timestamp):
return
continue
while b"\r\n" in self.ibuffer:
line, self.ibuffer = self.ibuffer.split(b"\r\n", 1)
self.iqueue.put(decode(line))
def send_loop(self):
while True:
line = self.oqueue.get().splitlines()[0][:500]
print(">>> %s" % line)
self.obuffer += line.encode("utf-8", "replace") + b"\r\n"
while self.obuffer:
sent = self.socket.send(self.obuffer)
self.obuffer = self.obuffer[sent:]
class crlf_ssl_tcp(crlf_tcp):
"Handles ssl tcp connetions that consist of utf-8 lines ending with crlf"
def __init__(self, host, port, ignore_cert_errors, timeout=300):
self.ignore_cert_errors = ignore_cert_errors
crlf_tcp.__init__(self, host, port, timeout)
def create_socket(self):
return wrap_socket(
crlf_tcp.create_socket(self),
server_side=False,
cert_reqs=CERT_NONE if self.ignore_cert_errors else CERT_REQUIRED,
)
def recv_from_socket(self, nbytes):
return self.socket.read(nbytes)
def get_timeout_exception_type(self):
return SSLError
def handle_receive_exception(self, error, last_timestamp):
return crlf_tcp.handle_receive_exception(self, error, last_timestamp)
def zip_channels(channels):
channels.sort(key=lambda x: " " not in x) # keyed channels first
chans = []
keys = []
for channel in channels:
if " " in channel:
chan, key = channel.split(" ")
chans.append(chan)
keys.append(key)
else:
chans.append(channel)
chans = ",".join(chans)
if keys:
return [chans, ",".join(keys)]
else:
return [chans]
def test_zip_channels():
assert zip_channels(["#a", "#b c", "#d"]) == ["#b,#a,#d", "c"]
assert zip_channels(["#a", "#b"]) == ["#a,#b"]
class IRC(object):
"handles the IRC protocol"
# see the docs/ folder for more information on the protocol
IRC_PREFIX_REM = re.compile(r"(.*?) (.*?) (.*)").match
IRC_NOPREFIX_REM = re.compile(r"()(.*?) (.*)").match
IRC_NETMASK_REM = re.compile(r":?([^!@]*)!?([^@]*)@?(.*)").match
IRC_PARAM_REF = re.compile(r"(?:^|(?<= ))(:.*|[^ ]+)").findall
def __init__(self, conf):
self.conn = None
self.nick = DEFAULT_NAME
self.user = DEFAULT_NAME
self.realname = DEFAULT_REALNAME
self.user_mode = None
self.server_host = None
self.server_port = 6667
self.server_password = None
self.nickserv_password = None
self.nickserv_name = DEFAULT_NICKSERV_NAME
self.nickserv_command = DEFAULT_NICKSERV_COMMAND
self.channels = []
self.admins = []
self.censored_strings = []
self.out = queue.Queue() # responses from the server are placed here
# format: [rawline, prefix, command, params,
# nick, user, host, paramlist, msg]
self.set_conf(conf)
self.connect()
_thread.start_new_thread(self.parse_loop, ())
def set_conf(self, conf):
self.nick = conf.get("nick", DEFAULT_NAME)
self.user = conf.get("user", DEFAULT_NAME)
self.realname = conf.get("realname", DEFAULT_REALNAME)
self.user_mode = conf.get("mode", None)
self.server_host = conf["server"]
self.server_port = conf.get("port", 6667)
self.server_password = conf.get("server_password", None)
self.nickserv_password = conf.get("nickserv_password", None)
self.nickserv_name = conf.get("nickserv_name", DEFAULT_NICKSERV_NAME)
self.nickserv_command = conf.get("nickserv_command", DEFAULT_NICKSERV_COMMAND)
self.channels = conf.get("channels", [])
self.admins = conf.get("admins", [])
self.censored_strings = conf.get("censored_strings", [])
if self.conn is not None:
self.join_channels()
def create_connection(self):
return crlf_tcp(self.server_host, self.server_port)
def connect(self):
self.conn = self.create_connection()
_thread.start_new_thread(self.conn.run, ())
# RFC 1459: PASS must precede the NICK/USER registration commands
if self.server_password:
self.cmd("PASS", [self.server_password])
self.cmd("NICK", [self.nick])
self.cmd("USER", [self.user, "3", "*", self.realname])
def parse_loop(self):
while True:
msg = self.conn.iqueue.get()
if msg == StopIteration:
self.connect()
continue
if msg.startswith(":"): # has a prefix
prefix, command, params = self.IRC_PREFIX_REM(msg).groups()
else:
prefix, command, params = self.IRC_NOPREFIX_REM(msg).groups()
nick, user, host = self.IRC_NETMASK_REM(prefix).groups()
paramlist = self.IRC_PARAM_REF(params)
lastparam = ""
if paramlist:
if paramlist[-1].startswith(":"):
paramlist[-1] = paramlist[-1][1:]
lastparam = paramlist[-1]
self.out.put(
[msg, prefix, command, params, nick, user, host, paramlist, lastparam]
)
if command == "PING":
self.cmd("PONG", paramlist)
def join(self, channel):
self.cmd("JOIN", channel.split(" ")) # [chan, password]
def join_channels(self):
if self.channels:
# TODO: send multiple join commands for large channel lists
self.cmd("JOIN", zip_channels(self.channels))
def msg(self, target, text):
self.cmd("PRIVMSG", [target, text])
def cmd(self, command, params=None):
if params:
params[-1] = ":" + params[-1]
params = [censor(p, self.censored_strings) for p in params]
self.send(command + " " + " ".join(params))
else:
self.send(command)
def send(self, line):
self.conn.oqueue.put(line)
class FakeIRC(IRC):
def __init__(self, conf, fn):
self.set_conf(conf)
self.out = queue.Queue() # responses from the server are placed here
self.f = open(fn, "rb")
_thread.start_new_thread(self.parse_loop, ())
def parse_loop(self):
while True:
msg = decode(self.f.readline()[9:])
if msg == "":
print("!!!!DONE READING FILE!!!!")
return
if msg.startswith(":"): # has a prefix
prefix, command, params = irc_prefix_rem(msg).groups()
else:
prefix, command, params = irc_noprefix_rem(msg).groups()
nick, user, host = irc_netmask_rem(prefix).groups()
paramlist = irc_param_ref(params)
lastparam = ""
if paramlist:
if paramlist[-1].startswith(":"):
paramlist[-1] = paramlist[-1][1:]
lastparam = paramlist[-1]
self.out.put(
[msg, prefix, command, params, nick, user, host, paramlist, lastparam]
)
if command == "PING":
self.cmd("PONG", [params])
def cmd(self, command, params=None):
pass
class SSLIRC(IRC):
def __init__(self, conf):
# set_conf (called inside IRC.__init__) applies the SSL defaults;
# assigning them again after super().__init__ would clobber configured values.
super(SSLIRC, self).__init__(conf=conf)
def set_conf(self, conf):
super(SSLIRC, self).set_conf(conf)
self.server_port = conf.get("port", 6697)
self.server_ignore_cert = conf.get("ignore_cert", False)
def create_connection(self):
return crlf_ssl_tcp(self.server_host, self.server_port, self.server_ignore_cert)
|
unlicense
|
rmmh/skybot
|
plugins/util/http.py
|
3
|
5942
|
from future.standard_library import hooks
from lxml import etree, html
import binascii
try:
from collections.abc import Mapping # Python 3.3+
except ImportError:
from collections import Mapping # Python 2
import hmac
import json
import random
import time
from hashlib import sha1
from builtins import str
from builtins import range
try:
from http.cookiejar import CookieJar
except ImportError:
from future.backports.http.cookiejar import CookieJar
with hooks():
import urllib.request, urllib.parse, urllib.error
from urllib.parse import (
quote,
unquote,
urlencode,
urlparse,
parse_qsl,
quote_plus as _quote_plus,
)
from urllib.error import HTTPError, URLError
ua_skybot = "Skybot/1.0 https://github.com/rmmh/skybot"
ua_firefox = (
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) "
"Gecko/20070725 Firefox/2.0.0.6"
)
ua_internetexplorer = "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)"
def get_cookie_jar():
if not hasattr(get_cookie_jar, "memo"):
get_cookie_jar.memo = CookieJar()
return get_cookie_jar.memo
def clear_expired_cookies():
get_cookie_jar().clear_expired_cookies()
def get(*args, **kwargs):
return open(*args, **kwargs).read().decode("utf-8")
def get_html(*args, **kwargs):
return html.fromstring(open(*args, **kwargs).read())
def get_xml(*args, **kwargs):
return etree.fromstring(open(*args, **kwargs).read())
def get_json(*args, **kwargs):
return json.loads(open(*args, **kwargs).read())
def open(
url,
query_params=None,
post_data=None,
json_data=None,
get_method=None,
cookies=False,
oauth=False,
oauth_keys=None,
headers=None,
**kwargs
):
if query_params is None:
query_params = {}
query_params.update(kwargs)
url = prepare_url(url, query_params)
if post_data and isinstance(post_data, Mapping):
post_data = urllib.parse.urlencode(post_data)
post_data = post_data.encode("UTF-8")
if json_data and isinstance(json_data, dict):
post_data = json.dumps(json_data).encode("utf-8")
request = urllib.request.Request(url, post_data)
if json_data:
request.add_header("Content-Type", "application/json")
if get_method is not None:
request.get_method = lambda: get_method
if headers is not None:
for header_key, header_value in headers.items():
request.add_header(header_key, header_value)
if "User-Agent" not in request.headers:
request.add_header("User-Agent", ua_skybot)
if oauth:
nonce = oauth_nonce()
timestamp = oauth_timestamp()
api_url, req_data = url.split("?")
unsigned_request = oauth_unsigned_request(
nonce, timestamp, req_data, oauth_keys["consumer"], oauth_keys["access"]
)
signature = oauth_sign_request(
"GET",
api_url,
req_data,
unsigned_request,
oauth_keys["consumer_secret"],
oauth_keys["access_secret"],
)
header = oauth_build_header(
nonce, signature, timestamp, oauth_keys["consumer"], oauth_keys["access"]
)
request.add_header("Authorization", header)
if cookies:
opener = urllib.request.build_opener(
urllib.request.HTTPCookieProcessor(get_cookie_jar())
)
else:
opener = urllib.request.build_opener()
return opener.open(request)
def prepare_url(url, queries):
if queries:
scheme, netloc, path, query, fragment = urllib.parse.urlsplit(str(url))
query = dict(urllib.parse.parse_qsl(query))
query.update(queries)
query = urllib.parse.urlencode(
dict((to_utf8(key), to_utf8(value)) for key, value in query.items())
)
url = urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))
return url
def to_utf8(s):
if isinstance(s, str):
return s.encode("utf8", "ignore")
else:
return str(s)
def quote_plus(s):
return _quote_plus(to_utf8(s))
def oauth_nonce():
return "".join([str(random.randint(0, 9)) for i in range(8)])
def oauth_timestamp():
return str(int(time.time()))
def oauth_unsigned_request(nonce, timestamp, req, consumer, token):
d = {
"oauth_consumer_key": consumer,
"oauth_nonce": nonce,
"oauth_signature_method": "HMAC-SHA1",
"oauth_timestamp": timestamp,
"oauth_token": token,
"oauth_version": "1.0",
}
d.update(urllib.parse.parse_qsl(req))
request_items = d.items()
# TODO: Remove this when Python 2 is no longer supported.
# some of the fields are actual string and others are
# a wrapper of str for the python 3 migration.
# Convert them all so that they sort correctly.
request_items = [(str(k), str(v)) for k, v in request_items]
return quote(urllib.parse.urlencode(sorted(request_items, key=lambda key: key[0])))
def oauth_build_header(nonce, signature, timestamp, consumer, token):
d = {
"oauth_consumer_key": consumer,
"oauth_nonce": nonce,
"oauth_signature": signature,
"oauth_signature_method": "HMAC-SHA1",
"oauth_timestamp": timestamp,
"oauth_token": token,
"oauth_version": "1.0",
}
header = "OAuth "
for x in sorted(d, key=lambda key: key[0]):
header += x + '="' + d[x] + '", '
return header[:-2] # strip the trailing ", "
def oauth_sign_request(
method, url, params, unsigned_request, consumer_secret, token_secret
):
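# OAuth 1.0 HMAC-SHA1 (RFC 5849): the signing key is
# "<consumer_secret>&<token_secret>" and the signature base string is
# "<METHOD>&<percent-encoded URL>&<percent-encoded sorted params>".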
key = consumer_secret + "&" + token_secret
key = key.encode("utf-8", "replace")
base = method + "&" + quote(url, "") + "&" + unsigned_request
base = base.encode("utf-8", "replace")
hash = hmac.new(key, base, sha1)
signature = quote(binascii.b2a_base64(hash.digest())[:-1])
return signature
def unescape(s):
if not s.strip():
return s
return html.fromstring(s).text_content()
|
unlicense
|
rmmh/skybot
|
plugins/crowdcontrol.py
|
3
|
1132
|
# crowdcontrol.py by craisins in 2014
# Bot must have some sort of op or admin privileges to be useful
import re
import time
from util import hook
# Use "crowdcontrol" array in config
# syntax
# rule:
# re: RegEx. regular expression to match
# msg: String. message to display either with kick or as a warning
# kick: Integer. 1 for True, 0 for False on if to kick user
# ban_length: Integer. (optional) Length of time (seconds) to ban user. (-1 to never unban, 0 to not ban, > 0 for a timed ban)
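# Example entry (an illustrative sketch; the values are hypothetical):
# "crowdcontrol": [
# {"re": "(?i)free bitcoin", "msg": "no spamming", "kick": 1, "ban_length": 3600}
# ]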
@hook.regex(r".*")
def crowdcontrol(inp, kick=None, ban=None, unban=None, reply=None, bot=None):
inp = inp.group(0)
for rule in bot.config.get("crowdcontrol", []):
if re.search(rule["re"], inp) is not None:
should_kick = rule.get("kick", 0)
ban_length = rule.get("ban_length", 0)
reason = rule.get("msg")
if ban_length != 0:
ban()
if should_kick:
kick(reason=reason)
elif "msg" in rule:
reply(reason)
if ban_length > 0:
time.sleep(ban_length)
unban()
|
unlicense
|
pytube/pytube
|
pytube/query.py
|
1
|
12622
|
"""This module provides a query interface for media streams and captions."""
from collections.abc import Mapping, Sequence
from typing import Callable, List, Optional, Union
from pytube import Caption, Stream
from pytube.helpers import deprecated
class StreamQuery(Sequence):
"""Interface for querying the available media streams."""
def __init__(self, fmt_streams):
"""Construct a :class:`StreamQuery <StreamQuery>`.
:param list fmt_streams:
list of :class:`Stream <Stream>` instances.
"""
self.fmt_streams = fmt_streams
self.itag_index = {int(s.itag): s for s in fmt_streams}
def filter(
self,
fps=None,
res=None,
resolution=None,
mime_type=None,
type=None,
subtype=None,
file_extension=None,
abr=None,
bitrate=None,
video_codec=None,
audio_codec=None,
only_audio=None,
only_video=None,
progressive=None,
adaptive=None,
is_dash=None,
custom_filter_functions=None,
):
"""Apply the given filtering criterion.
:param fps:
(optional) The frames per second.
:type fps:
int or None
:param resolution:
(optional) Alias to ``res``.
:type resolution:
str or None
:param res:
(optional) The video resolution.
:type res:
str or None
:param mime_type:
(optional) Two-part identifier for file formats and format contents
composed of a "type", a "subtype".
:type mime_type:
str or None
:param type:
(optional) Type part of the ``mime_type`` (e.g.: audio, video).
:type type:
str or None
:param subtype:
(optional) Sub-type part of the ``mime_type`` (e.g.: mp4, mov).
:type subtype:
str or None
:param file_extension:
(optional) Alias to ``subtype``.
:type file_extension:
str or None
:param abr:
(optional) Average bitrate (ABR) refers to the average amount of
data transferred per unit of time (e.g.: 64kbps, 192kbps).
:type abr:
str or None
:param bitrate:
(optional) Alias to ``abr``.
:type bitrate:
str or None
:param video_codec:
(optional) Video compression format.
:type video_codec:
str or None
:param audio_codec:
(optional) Audio compression format.
:type audio_codec:
str or None
:param bool progressive:
Excludes adaptive streams (one file contains both audio and video
tracks).
:param bool adaptive:
Excludes progressive streams (audio and video are on separate
tracks).
:param bool is_dash:
Include/exclude dash streams.
:param bool only_audio:
Excludes streams with video tracks.
:param bool only_video:
Excludes streams with audio tracks.
:param custom_filter_functions:
(optional) Interface for defining complex filters without
subclassing.
:type custom_filter_functions:
list or None
"""
filters = []
if res or resolution:
filters.append(lambda s: s.resolution == (res or resolution))
if fps:
filters.append(lambda s: s.fps == fps)
if mime_type:
filters.append(lambda s: s.mime_type == mime_type)
if type:
filters.append(lambda s: s.type == type)
if subtype or file_extension:
filters.append(lambda s: s.subtype == (subtype or file_extension))
if abr or bitrate:
filters.append(lambda s: s.abr == (abr or bitrate))
if video_codec:
filters.append(lambda s: s.video_codec == video_codec)
if audio_codec:
filters.append(lambda s: s.audio_codec == audio_codec)
if only_audio:
filters.append(
lambda s: (
s.includes_audio_track and not s.includes_video_track
),
)
if only_video:
filters.append(
lambda s: (
s.includes_video_track and not s.includes_audio_track
),
)
if progressive:
filters.append(lambda s: s.is_progressive)
if adaptive:
filters.append(lambda s: s.is_adaptive)
if custom_filter_functions:
filters.extend(custom_filter_functions)
if is_dash is not None:
filters.append(lambda s: s.is_dash == is_dash)
return self._filter(filters)
def _filter(self, filters: List[Callable]) -> "StreamQuery":
fmt_streams = self.fmt_streams
for filter_lambda in filters:
fmt_streams = filter(filter_lambda, fmt_streams)
return StreamQuery(list(fmt_streams))
def order_by(self, attribute_name: str) -> "StreamQuery":
"""Apply a sort order. Filters out stream the do not have the attribute.
:param str attribute_name:
The name of the attribute to sort by.
"""
has_attribute = [
s
for s in self.fmt_streams
if getattr(s, attribute_name) is not None
]
# Check that the attributes have string values.
if has_attribute and isinstance(
getattr(has_attribute[0], attribute_name), str
):
# Try to return a StreamQuery sorted by the integer representations
# of the values.
try:
return StreamQuery(
sorted(
has_attribute,
key=lambda s: int(
"".join(
filter(str.isdigit, getattr(s, attribute_name))
)
), # type: ignore # noqa: E501
)
)
except ValueError:
pass
return StreamQuery(
sorted(has_attribute, key=lambda s: getattr(s, attribute_name))
)
def desc(self) -> "StreamQuery":
"""Sort streams in descending order.
:rtype: :class:`StreamQuery <StreamQuery>`
"""
return StreamQuery(self.fmt_streams[::-1])
def asc(self) -> "StreamQuery":
"""Sort streams in ascending order.
:rtype: :class:`StreamQuery <StreamQuery>`
"""
return self
def get_by_itag(self, itag: int) -> Optional[Stream]:
"""Get the corresponding :class:`Stream <Stream>` for a given itag.
:param int itag:
YouTube format identifier code.
:rtype: :class:`Stream <Stream>` or None
:returns:
The :class:`Stream <Stream>` matching the given itag or None if
not found.
"""
return self.itag_index.get(int(itag))
def get_by_resolution(self, resolution: str) -> Optional[Stream]:
"""Get the corresponding :class:`Stream <Stream>` for a given resolution.
Stream must be a progressive mp4.
:param str resolution:
Video resolution i.e. "720p", "480p", "360p", "240p", "144p"
:rtype: :class:`Stream <Stream>` or None
:returns:
The :class:`Stream <Stream>` matching the given itag or None if
not found.
"""
return self.filter(
progressive=True, subtype="mp4", resolution=resolution
).first()
def get_lowest_resolution(self) -> Optional[Stream]:
"""Get lowest resolution stream that is a progressive mp4.
:rtype: :class:`Stream <Stream>` or None
:returns:
The :class:`Stream <Stream>` matching the given itag or None if
not found.
"""
return (
self.filter(progressive=True, subtype="mp4")
.order_by("resolution")
.first()
)
def get_highest_resolution(self) -> Optional[Stream]:
"""Get highest resolution stream that is a progressive video.
:rtype: :class:`Stream <Stream>` or None
:returns:
The :class:`Stream <Stream>` matching the given itag or None if
not found.
"""
return self.filter(progressive=True).order_by("resolution").last()
def get_audio_only(self, subtype: str = "mp4") -> Optional[Stream]:
"""Get highest bitrate audio stream for given codec (defaults to mp4)
:param str subtype:
Audio subtype, defaults to mp4
:rtype: :class:`Stream <Stream>` or None
:returns:
The :class:`Stream <Stream>` matching the given itag or None if
not found.
"""
return (
self.filter(only_audio=True, subtype=subtype)
.order_by("abr")
.last()
)
def otf(self, is_otf: bool = False) -> "StreamQuery":
"""Filter stream by OTF, useful if some streams have 404 URLs
:param bool is_otf: Set to False to retrieve only non-OTF streams
:rtype: :class:`StreamQuery <StreamQuery>`
:returns: A StreamQuery object with otf filtered streams
"""
return self._filter([lambda s: s.is_otf == is_otf])
def first(self) -> Optional[Stream]:
"""Get the first :class:`Stream <Stream>` in the results.
:rtype: :class:`Stream <Stream>` or None
:returns:
the first result of this query or None if the result doesn't
contain any streams.
"""
try:
return self.fmt_streams[0]
except IndexError:
return None
def last(self):
"""Get the last :class:`Stream <Stream>` in the results.
:rtype: :class:`Stream <Stream>` or None
:returns:
Return the last result of this query or None if the result
doesn't contain any streams.
"""
try:
return self.fmt_streams[-1]
except IndexError:
pass
@deprecated("Get the size of this list directly using len()")
def count(self, value: Optional[str] = None) -> int: # pragma: no cover
"""Get the count of items in the list.
:rtype: int
"""
if value:
return self.fmt_streams.count(value)
return len(self)
@deprecated("This object can be treated as a list, all() is useless")
def all(self) -> List[Stream]: # pragma: no cover
"""Get all the results represented by this query as a list.
:rtype: list
"""
return self.fmt_streams
def __getitem__(self, i: Union[slice, int]):
return self.fmt_streams[i]
def __len__(self) -> int:
return len(self.fmt_streams)
def __repr__(self) -> str:
return f"{self.fmt_streams}"
class CaptionQuery(Mapping):
"""Interface for querying the available captions."""
def __init__(self, captions: List[Caption]):
"""Construct a :class:`Caption <Caption>`.
param list captions:
list of :class:`Caption <Caption>` instances.
"""
self.lang_code_index = {c.code: c for c in captions}
@deprecated(
"This object can be treated as a dictionary, i.e. captions['en']"
)
def get_by_language_code(
self, lang_code: str
) -> Optional[Caption]: # pragma: no cover
"""Get the :class:`Caption <Caption>` for a given ``lang_code``.
:param str lang_code:
The code that identifies the caption language.
:rtype: :class:`Caption <Caption>` or None
:returns:
The :class:`Caption <Caption>` matching the given ``lang_code`` or
None if it does not exist.
"""
return self.lang_code_index.get(lang_code)
@deprecated("This object can be treated as a dictionary")
def all(self) -> List[Caption]: # pragma: no cover
"""Get all the results represented by this query as a list.
:rtype: list
"""
return list(self.lang_code_index.values())
def __getitem__(self, i: str):
return self.lang_code_index[i]
def __len__(self) -> int:
return len(self.lang_code_index)
def __iter__(self):
return iter(self.lang_code_index.values())
def __repr__(self) -> str:
return f"{self.lang_code_index}"
|
unlicense
|
pytube/pytube
|
pytube/innertube.py
|
1
|
11658
|
"""This module is designed to interact with the innertube API.
This module is NOT intended to be used directly by end users, as each of the
interfaces returns raw results. These should instead be parsed to extract
the useful information for the end user.
"""
# Native python imports
import json
import os
import pathlib
import time
from urllib import parse
# Local imports
from pytube import request
# YouTube on TV client secrets
_client_id = '861556708454-d6dlm3lh05idd8npek18k6be8ba3oc68.apps.googleusercontent.com'
_client_secret = 'SboVhoG9s0rNafixCSGGKXAT'
# Extracted API keys -- unclear what these are linked to.
_api_keys = [
'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8',
'AIzaSyCtkvNIR1HCEwzsqK6JuE6KqpyjusIRI30',
'AIzaSyA8eiZmM1FaDVjRy-df2KTyQ_vz_yYM39w',
'AIzaSyC8UYZpvA2eknNex0Pjid0_eTLJoDu6los',
'AIzaSyCjc_pVEDi4qsv5MtC2dMXzpIaDoRFLsxw',
'AIzaSyDHQ9ipnphqTzDqZsbtd8_Ru4_kiKVQe2k'
]
_default_clients = {
'WEB': {
'context': {
'client': {
'clientName': 'WEB',
'clientVersion': '2.20200720.00.02'
}
},
'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
},
'ANDROID': {
'context': {
'client': {
'clientName': 'ANDROID',
'clientVersion': '16.20'
}
},
'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
},
'WEB_EMBED': {
'context': {
'client': {
'clientName': 'WEB',
'clientVersion': '2.20210721.00.00',
'clientScreen': 'EMBED'
}
},
'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
},
'ANDROID_EMBED': {
'context': {
'client': {
'clientName': 'ANDROID',
'clientVersion': '16.20',
'clientScreen': 'EMBED'
}
},
'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
}
}
_token_timeout = 1800
_cache_dir = pathlib.Path(__file__).parent.resolve() / '__cache__'
_token_file = os.path.join(_cache_dir, 'tokens.json')
class InnerTube:
"""Object for interacting with the innertube API."""
def __init__(self, client='ANDROID', use_oauth=False, allow_cache=True):
"""Initialize an InnerTube object.
:param str client:
Client to use for the object.
            Defaults to ``ANDROID``.
:param bool use_oauth:
Whether or not to authenticate to YouTube.
:param bool allow_cache:
Allows caching of oauth tokens on the machine.
"""
self.context = _default_clients[client]['context']
self.api_key = _default_clients[client]['api_key']
self.access_token = None
self.refresh_token = None
self.use_oauth = use_oauth
self.allow_cache = allow_cache
# Stored as epoch time
self.expires = None
# Try to load from file if specified
if self.use_oauth and self.allow_cache:
# Try to load from file if possible
if os.path.exists(_token_file):
with open(_token_file) as f:
data = json.load(f)
self.access_token = data['access_token']
self.refresh_token = data['refresh_token']
self.expires = data['expires']
self.refresh_bearer_token()
def cache_tokens(self):
"""Cache tokens to file if allowed."""
if not self.allow_cache:
return
data = {
'access_token': self.access_token,
'refresh_token': self.refresh_token,
'expires': self.expires
}
if not os.path.exists(_cache_dir):
os.mkdir(_cache_dir)
with open(_token_file, 'w') as f:
json.dump(data, f)
def refresh_bearer_token(self, force=False):
"""Refreshes the OAuth token if necessary.
:param bool force:
Force-refresh the bearer token.
"""
if not self.use_oauth:
return
# Skip refresh if it's not necessary and not forced
if self.expires > time.time() and not force:
return
        # Subtracting 30 seconds is arbitrary to avoid potential time discrepancies
start_time = int(time.time() - 30)
data = {
'client_id': _client_id,
'client_secret': _client_secret,
'grant_type': 'refresh_token',
'refresh_token': self.refresh_token
}
response = request._execute_request(
'https://oauth2.googleapis.com/token',
'POST',
headers={
'Content-Type': 'application/json'
},
data=data
)
response_data = json.loads(response.read())
self.access_token = response_data['access_token']
self.expires = start_time + response_data['expires_in']
self.cache_tokens()
def fetch_bearer_token(self):
"""Fetch an OAuth token."""
        # Subtracting 30 seconds is arbitrary to avoid potential time discrepancies
start_time = int(time.time() - 30)
data = {
'client_id': _client_id,
'scope': 'https://www.googleapis.com/auth/youtube'
}
response = request._execute_request(
'https://oauth2.googleapis.com/device/code',
'POST',
headers={
'Content-Type': 'application/json'
},
data=data
)
response_data = json.loads(response.read())
verification_url = response_data['verification_url']
user_code = response_data['user_code']
print(f'Please open {verification_url} and input code {user_code}')
input('Press enter when you have completed this step.')
data = {
'client_id': _client_id,
'client_secret': _client_secret,
'device_code': response_data['device_code'],
'grant_type': 'urn:ietf:params:oauth:grant-type:device_code'
}
response = request._execute_request(
'https://oauth2.googleapis.com/token',
'POST',
headers={
'Content-Type': 'application/json'
},
data=data
)
response_data = json.loads(response.read())
self.access_token = response_data['access_token']
self.refresh_token = response_data['refresh_token']
self.expires = start_time + response_data['expires_in']
self.cache_tokens()
@property
def base_url(self):
"""Return the base url endpoint for the innertube API."""
return 'https://www.youtube.com/youtubei/v1'
@property
def base_data(self):
"""Return the base json data to transmit to the innertube API."""
return {
'context': self.context
}
@property
def base_params(self):
"""Return the base query parameters to transmit to the innertube API."""
return {
'key': self.api_key,
'contentCheckOk': True,
'racyCheckOk': True
}
def _call_api(self, endpoint, query, data):
"""Make a request to a given endpoint with the provided query parameters and data."""
# Remove the API key if oauth is being used.
if self.use_oauth:
del query['key']
endpoint_url = f'{endpoint}?{parse.urlencode(query)}'
headers = {
'Content-Type': 'application/json',
}
# Add the bearer token if applicable
if self.use_oauth:
if self.access_token:
self.refresh_bearer_token()
headers['Authorization'] = f'Bearer {self.access_token}'
else:
self.fetch_bearer_token()
headers['Authorization'] = f'Bearer {self.access_token}'
response = request._execute_request(
endpoint_url,
'POST',
headers=headers,
data=data
)
return json.loads(response.read())
def browse(self):
"""Make a request to the browse endpoint.
TODO: Figure out how we can use this
"""
# endpoint = f'{self.base_url}/browse' # noqa:E800
...
# return self._call_api(endpoint, query, self.base_data) # noqa:E800
def config(self):
"""Make a request to the config endpoint.
TODO: Figure out how we can use this
"""
# endpoint = f'{self.base_url}/config' # noqa:E800
...
# return self._call_api(endpoint, query, self.base_data) # noqa:E800
def guide(self):
"""Make a request to the guide endpoint.
TODO: Figure out how we can use this
"""
# endpoint = f'{self.base_url}/guide' # noqa:E800
...
# return self._call_api(endpoint, query, self.base_data) # noqa:E800
def next(self):
"""Make a request to the next endpoint.
TODO: Figure out how we can use this
"""
# endpoint = f'{self.base_url}/next' # noqa:E800
...
# return self._call_api(endpoint, query, self.base_data) # noqa:E800
def player(self, video_id):
"""Make a request to the player endpoint.
:param str video_id:
The video id to get player info for.
:rtype: dict
:returns:
Raw player info results.
"""
endpoint = f'{self.base_url}/player'
query = {
'videoId': video_id,
}
query.update(self.base_params)
return self._call_api(endpoint, query, self.base_data)
def search(self, search_query, continuation=None):
"""Make a request to the search endpoint.
:param str search_query:
The query to search.
:rtype: dict
:returns:
Raw search query results.
"""
endpoint = f'{self.base_url}/search'
query = {
'query': search_query
}
query.update(self.base_params)
data = {}
if continuation:
data['continuation'] = continuation
data.update(self.base_data)
return self._call_api(endpoint, query, data)
def verify_age(self, video_id):
"""Make a request to the age_verify endpoint.
Notable examples of the types of video this verification step is for:
* https://www.youtube.com/watch?v=QLdAhwSBZ3w
* https://www.youtube.com/watch?v=hc0ZDaAZQT0
:param str video_id:
The video id to get player info for.
:rtype: dict
:returns:
Returns information that includes a URL for bypassing certain restrictions.
"""
endpoint = f'{self.base_url}/verify_age'
data = {
'nextEndpoint': {
'urlEndpoint': {
'url': f'/watch?v={video_id}'
}
},
'setControvercy': True
}
data.update(self.base_data)
result = self._call_api(endpoint, self.base_params, data)
return result
def get_transcript(self, video_id):
"""Make a request to the get_transcript endpoint.
This is likely related to captioning for videos, but is currently untested.
"""
endpoint = f'{self.base_url}/get_transcript'
query = {
'videoId': video_id,
}
query.update(self.base_params)
result = self._call_api(endpoint, query, self.base_data)
return result
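# A brief usage sketch (illustrative only): the InnerTube class above is
# exercised by constructing a client and calling one of the endpoint helpers.
# The video id is a placeholder, and the response layout accessed in the last
# line is an assumption about the raw innertube player payload.
#
#     it = InnerTube(client='WEB')
#     player_info = it.player('dQw4w9WgXcQ')
#     title = player_info['videoDetails']['title']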
|
unlicense
|
pytube/pytube
|
tests/test_request.py
|
1
|
1820
|
import socket
import os
import pytest
from unittest import mock
from urllib.error import URLError
from pytube import request
from pytube.exceptions import MaxRetriesExceeded
@mock.patch("pytube.request.urlopen")
def test_streaming(mock_urlopen):
# Given
fake_stream_binary = [
os.urandom(8 * 1024),
os.urandom(8 * 1024),
os.urandom(8 * 1024),
None,
]
mock_response = mock.Mock()
mock_response.read.side_effect = fake_stream_binary
mock_response.info.return_value = {"Content-Range": "bytes 200-1000/24576"}
mock_urlopen.return_value = mock_response
# When
response = request.stream("http://fakeassurl.gov/streaming_test")
# Then
assert len(b''.join(response)) == 3 * 8 * 1024
assert mock_response.read.call_count == 4
@mock.patch('pytube.request.urlopen')
def test_timeout(mock_urlopen):
exc = URLError(reason=socket.timeout('timed_out'))
mock_urlopen.side_effect = exc
generator = request.stream('http://fakeassurl.gov/timeout_test', timeout=1)
with pytest.raises(MaxRetriesExceeded):
next(generator)
@mock.patch("pytube.request.urlopen")
def test_headers(mock_urlopen):
response = mock.Mock()
response.info.return_value = {"content-length": "16384"}
mock_urlopen.return_value = response
response = request.head("http://fakeassurl.gov")
assert response == {"content-length": "16384"}
@mock.patch("pytube.request.urlopen")
def test_get(mock_urlopen):
response = mock.Mock()
response.read.return_value = "<html></html>".encode("utf-8")
mock_urlopen.return_value = response
response = request.get("http://fakeassurl.gov")
assert response == "<html></html>"
def test_get_non_http():
with pytest.raises(ValueError): # noqa: PT011
request.get("file://bad")
|
unlicense
|
pytube/pytube
|
pytube/contrib/playlist.py
|
1
|
14204
|
"""Module to download a complete playlist from a youtube channel."""
import json
import logging
from collections.abc import Sequence
from datetime import date, datetime
from typing import Dict, Iterable, List, Optional, Tuple, Union
from pytube import extract, request, YouTube
from pytube.helpers import cache, DeferredGeneratorList, install_proxy, uniqueify
logger = logging.getLogger(__name__)
class Playlist(Sequence):
"""Load a YouTube playlist with URL"""
def __init__(self, url: str, proxies: Optional[Dict[str, str]] = None):
if proxies:
install_proxy(proxies)
self._input_url = url
# These need to be initialized as None for the properties.
self._html = None
self._ytcfg = None
self._initial_data = None
self._sidebar_info = None
self._playlist_id = None
@property
def playlist_id(self):
"""Get the playlist id.
:rtype: str
"""
if self._playlist_id:
return self._playlist_id
self._playlist_id = extract.playlist_id(self._input_url)
return self._playlist_id
@property
def playlist_url(self):
"""Get the base playlist url.
:rtype: str
"""
return f"https://www.youtube.com/playlist?list={self.playlist_id}"
@property
def html(self):
"""Get the playlist page html.
:rtype: str
"""
if self._html:
return self._html
self._html = request.get(self.playlist_url)
return self._html
@property
def ytcfg(self):
"""Extract the ytcfg from the playlist page html.
:rtype: dict
"""
if self._ytcfg:
return self._ytcfg
self._ytcfg = extract.get_ytcfg(self.html)
return self._ytcfg
@property
def initial_data(self):
"""Extract the initial data from the playlist page html.
:rtype: dict
"""
if self._initial_data:
return self._initial_data
else:
self._initial_data = extract.initial_data(self.html)
return self._initial_data
@property
def sidebar_info(self):
"""Extract the sidebar info from the playlist page html.
:rtype: dict
"""
if self._sidebar_info:
return self._sidebar_info
else:
self._sidebar_info = self.initial_data['sidebar'][
'playlistSidebarRenderer']['items']
return self._sidebar_info
@property
def yt_api_key(self):
"""Extract the INNERTUBE_API_KEY from the playlist ytcfg.
:rtype: str
"""
return self.ytcfg['INNERTUBE_API_KEY']
def _paginate(
self, until_watch_id: Optional[str] = None
) -> Iterable[List[str]]:
"""Parse the video links from the page source, yields the /watch?v=
part from video link
:param until_watch_id Optional[str]: YouTube Video watch id until
which the playlist should be read.
:rtype: Iterable[List[str]]
:returns: Iterable of lists of YouTube watch ids
"""
videos_urls, continuation = self._extract_videos(
json.dumps(extract.initial_data(self.html))
)
if until_watch_id:
try:
trim_index = videos_urls.index(f"/watch?v={until_watch_id}")
yield videos_urls[:trim_index]
return
except ValueError:
pass
yield videos_urls
# Extraction from a playlist only returns 100 videos at a time
# if self._extract_videos returns a continuation there are more
# than 100 songs inside a playlist, so we need to add further requests
# to gather all of them
if continuation:
load_more_url, headers, data = self._build_continuation_url(continuation)
else:
load_more_url, headers, data = None, None, None
while load_more_url and headers and data: # there is an url found
logger.debug("load more url: %s", load_more_url)
# requesting the next page of videos with the url generated from the
# previous page, needs to be a post
req = request.post(load_more_url, extra_headers=headers, data=data)
# extract up to 100 songs from the page loaded
# returns another continuation if more videos are available
videos_urls, continuation = self._extract_videos(req)
if until_watch_id:
try:
trim_index = videos_urls.index(f"/watch?v={until_watch_id}")
yield videos_urls[:trim_index]
return
except ValueError:
pass
yield videos_urls
if continuation:
load_more_url, headers, data = self._build_continuation_url(
continuation
)
else:
load_more_url, headers, data = None, None, None
def _build_continuation_url(self, continuation: str) -> Tuple[str, dict, dict]:
"""Helper method to build the url and headers required to request
the next page of videos
:param str continuation: Continuation extracted from the json response
of the last page
:rtype: Tuple[str, dict, dict]
        :returns: Tuple of the url, required headers, and post data for the
            next http request
"""
return (
(
# was changed to this format (and post requests)
# between 2021.03.02 and 2021.03.03
"https://www.youtube.com/youtubei/v1/browse?key="
f"{self.yt_api_key}"
),
{
"X-YouTube-Client-Name": "1",
"X-YouTube-Client-Version": "2.20200720.00.02",
},
# extra data required for post request
{
"continuation": continuation,
"context": {
"client": {
"clientName": "WEB",
"clientVersion": "2.20200720.00.02"
}
}
}
)
@staticmethod
def _extract_videos(raw_json: str) -> Tuple[List[str], Optional[str]]:
"""Extracts videos from a raw json page
:param str raw_json: Input json extracted from the page or the last
server response
:rtype: Tuple[List[str], Optional[str]]
:returns: Tuple containing a list of up to 100 video watch ids and
a continuation token, if more videos are available
"""
initial_data = json.loads(raw_json)
try:
# this is the json tree structure, if the json was extracted from
# html
section_contents = initial_data["contents"][
"twoColumnBrowseResultsRenderer"][
"tabs"][0]["tabRenderer"]["content"][
"sectionListRenderer"]["contents"]
try:
# Playlist without submenus
important_content = section_contents[
0]["itemSectionRenderer"][
"contents"][0]["playlistVideoListRenderer"]
except (KeyError, IndexError, TypeError):
# Playlist with submenus
important_content = section_contents[
1]["itemSectionRenderer"][
"contents"][0]["playlistVideoListRenderer"]
videos = important_content["contents"]
except (KeyError, IndexError, TypeError):
try:
# this is the json tree structure, if the json was directly sent
# by the server in a continuation response
# no longer a list and no longer has the "response" key
important_content = initial_data['onResponseReceivedActions'][0][
'appendContinuationItemsAction']['continuationItems']
videos = important_content
except (KeyError, IndexError, TypeError) as p:
logger.info(p)
return [], None
try:
continuation = videos[-1]['continuationItemRenderer'][
'continuationEndpoint'
]['continuationCommand']['token']
videos = videos[:-1]
except (KeyError, IndexError):
# if there is an error, no continuation is available
continuation = None
# remove duplicates
return (
uniqueify(
list(
# only extract the video ids from the video data
map(
lambda x: (
f"/watch?v="
f"{x['playlistVideoRenderer']['videoId']}"
),
videos
)
),
),
continuation,
)
def trimmed(self, video_id: str) -> Iterable[str]:
"""Retrieve a list of YouTube video URLs trimmed at the given video ID
i.e. if the playlist has video IDs 1,2,3,4 calling trimmed(3) returns
[1,2]
:type video_id: str
video ID to trim the returned list of playlist URLs at
:rtype: List[str]
:returns:
List of video URLs from the playlist trimmed at the given ID
"""
for page in self._paginate(until_watch_id=video_id):
yield from (self._video_url(watch_path) for watch_path in page)
def url_generator(self):
"""Generator that yields video URLs.
:Yields: Video URLs
"""
for page in self._paginate():
for video in page:
yield self._video_url(video)
@property # type: ignore
@cache
def video_urls(self) -> DeferredGeneratorList:
"""Complete links of all the videos in playlist
:rtype: List[str]
:returns: List of video URLs
"""
return DeferredGeneratorList(self.url_generator())
def videos_generator(self):
for url in self.video_urls:
yield YouTube(url)
@property
def videos(self) -> Iterable[YouTube]:
"""Yields YouTube objects of videos in this playlist
:rtype: List[YouTube]
:returns: List of YouTube
"""
return DeferredGeneratorList(self.videos_generator())
def __getitem__(self, i: Union[slice, int]) -> Union[str, List[str]]:
return self.video_urls[i]
def __len__(self) -> int:
return len(self.video_urls)
def __repr__(self) -> str:
return f"{repr(self.video_urls)}"
@property
@cache
def last_updated(self) -> Optional[date]:
"""Extract the date that the playlist was last updated.
For some playlists, this will be a specific date, which is returned as a datetime
        object. For other playlists, this is an estimate such as "1 week ago". Because
        this value is returned as a string, pytube does a best-effort parse
        where possible, and returns the raw string where it is not possible.
:return: Date of last playlist update where possible, else the string provided
:rtype: datetime.date
"""
last_updated_text = self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
'stats'][2]['runs'][1]['text']
try:
date_components = last_updated_text.split()
month = date_components[0]
day = date_components[1].strip(',')
year = date_components[2]
return datetime.strptime(
f"{month} {day:0>2} {year}", "%b %d %Y"
).date()
except (IndexError, KeyError):
return last_updated_text
@property
@cache
def title(self) -> Optional[str]:
"""Extract playlist title
:return: playlist title (name)
:rtype: Optional[str]
"""
return self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
'title']['runs'][0]['text']
@property
def description(self) -> str:
return self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
'description']['simpleText']
@property
def length(self):
"""Extract the number of videos in the playlist.
:return: Playlist video count
:rtype: int
"""
count_text = self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
'stats'][0]['runs'][0]['text']
count_text = count_text.replace(',','')
return int(count_text)
@property
def views(self):
"""Extract view count for playlist.
:return: Playlist view count
:rtype: int
"""
# "1,234,567 views"
views_text = self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
'stats'][1]['simpleText']
# "1,234,567"
count_text = views_text.split()[0]
# "1234567"
count_text = count_text.replace(',', '')
return int(count_text)
@property
def owner(self):
"""Extract the owner of the playlist.
:return: Playlist owner name.
:rtype: str
"""
return self.sidebar_info[1]['playlistSidebarSecondaryInfoRenderer'][
'videoOwner']['videoOwnerRenderer']['title']['runs'][0]['text']
@property
def owner_id(self):
"""Extract the channel_id of the owner of the playlist.
:return: Playlist owner's channel ID.
:rtype: str
"""
return self.sidebar_info[1]['playlistSidebarSecondaryInfoRenderer'][
'videoOwner']['videoOwnerRenderer']['title']['runs'][0][
'navigationEndpoint']['browseEndpoint']['browseId']
@property
def owner_url(self):
"""Create the channel url of the owner of the playlist.
:return: Playlist owner's channel url.
:rtype: str
"""
return f'https://www.youtube.com/channel/{self.owner_id}'
@staticmethod
def _video_url(watch_path: str):
return f"https://www.youtube.com{watch_path}"
|
unlicense
|
mozilla-iam/cis
|
python-modules/cis_profile/tests/test_fake_profile.py
|
1
|
2844
|
import pytest
from boto3.dynamodb.types import TypeDeserializer
from cis_profile import fake_profile
from cis_profile import profile
from cis_profile import exceptions
class TestFakeProfile(object):
def test_fake_user(self):
u = fake_profile.FakeUser()
print(u.user_id.value)
j = u.as_json()
d = u.as_dict()
assert j is not None
assert d is not None
assert u.user_id.value is not None
u.validate()
u.verify_all_publishers(u)
def test_same_fake_user(self):
a = fake_profile.FakeUser(seed=1337)
b = fake_profile.FakeUser(seed=1337)
c = fake_profile.FakeUser(seed=23)
assert a.uuid.value == b.uuid.value
assert a.uuid.value != c.uuid.value
def test_batch_create(self):
profiles = fake_profile.batch_create_fake_profiles(seed=1337, number=3)
assert len(profiles) == 3
for i, p in enumerate(profiles, 1):
assert p is not None
assert p["access_information"]["hris"]["values"]["employee_id"] == i
def test_with_and_without_uuid(self):
c_with_uuid = fake_profile.FakeProfileConfig().uuid_username()
c_without_uuid = fake_profile.FakeProfileConfig()
a = fake_profile.FakeUser(seed=23, config=c_with_uuid)
assert a.uuid.value is not None
b = fake_profile.FakeUser(seed=23, config=c_without_uuid)
assert b.uuid.value is None
def test_null_create_profile(self):
empty_profile = profile.User()
create_profile = fake_profile.FakeUser(
seed=1337, config=fake_profile.FakeProfileConfig().default().minimal().create().no_display()
)
update_profile = fake_profile.FakeUser(
seed=1337, config=fake_profile.FakeProfileConfig().minimal().default().no_display()
)
update_profile.uuid = create_profile.uuid
update_profile.user_id = create_profile.user_id
update_profile.primary_username.value = "test" # invalid because default publisher is cis
update_profile.primary_username.signature.publisher.name = "mozilliansorg"
with pytest.raises(exceptions.PublisherVerificationFailure):
update_profile.verify_all_publishers(empty_profile)
assert create_profile.verify_all_publishers(empty_profile) is True
assert update_profile.verify_all_publishers(create_profile) is True
def test_fake_dynamo_flat_dict_output(self):
# This profile must have ldap groups and staff data
p = fake_profile.FakeUser(seed=44)
ddb = p.as_dynamo_flat_dict()
deserializer = TypeDeserializer()
res = {k: deserializer.deserialize(v) for k, v in ddb.items()}
assert len(res["access_information"]["ldap"]) > 0
assert len(res["staff_information"]["office_location"]) > 0
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_crypto/cis_crypto/cli.py
|
1
|
3035
|
#!/usr/bin/env python3
import argparse
import jose
import logging
import sys
from cis_crypto import common
from cis_crypto import operation
class cli:
def __init__(self):
self.config = None
self.prog = sys.argv[0].split("/")[-1]
def parse_args(self, args):
parser = argparse.ArgumentParser(
description="""
        Command line wrapper for mozilla-iam sign/verify operations on JSON and YAML files using JWKS.
"""
)
subparsers = parser.add_subparsers(dest="cryptographic-operation")
subparsers.required = True
sign_operation_parser = subparsers.add_parser(
"sign", help="Use a jwks key to generate a signature for a file. (Assumes a json or yaml file)"
)
sign_operation_parser.add_argument(
"--file", help="The path to the file you would like to sign. (Assumes a json or yaml file)"
)
sign_operation_parser.set_defaults(func="sign_operation")
verify_operation_parser = subparsers.add_parser(
"verify", help="Verify a signture with a known file. (Assumes a json file)"
)
        verify_operation_parser.add_argument("--file", help="The path to the file you would like to verify.")
verify_operation_parser.set_defaults(func="verify_operation")
return parser.parse_args(args)
def run(self):
logger = logging.getLogger(__name__)
self.config = self.parse_args(sys.argv[1:])
if self.config.func == "sign_operation":
logger.info("Attempting to sign file: {}".format(self.config.file))
file_content = common.load_file(self.config.file)
signing_object = operation.Sign()
signing_object.load(file_content)
jws = signing_object.jws()
common.write_file(jws, "{}.jws".format(self.config.file))
logger.info("File signed. Your signed file is now: {}.jws".format(self.config.file))
logger.info("To verify this file use cis_crypto verify --file {}.jws".format(self.config.file))
elif self.config.func == "verify_operation":
logger.info("Attempting verification of signature for file: {}".format(self.config.file))
everett_config = common.get_config()
logger.info(
"Attempting fetch of .well-known data from: {}".format(
everett_config("public_key_name", namespace="cis", default="access-file-key.pub.pem")
)
)
file_content = common.load_file(self.config.file)
verify_object = operation.Verify()
verify_object.load(file_content)
try:
                verify_object.jws()  # This will raise if the signature is invalid.
logger.info("Signature verified for file: {}".format(self.config.file))
except jose.exceptions.JWSError:
logger.error("The signature could not be verified.")
sys.exit()
sys.exit()
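# Example invocations, as suggested by the log messages emitted above:
#
#     cis_crypto sign --file profile.json       # writes profile.json.jws
#     cis_crypto verify --file profile.json.jws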
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_notifications/cis_notifications/event.py
|
1
|
5547
|
import logging
import time
import requests
from cis_notifications import common
from cis_notifications import secret
logger = logging.getLogger(__name__)
def expired(ts, leeway=0):
return ts < time.time() + leeway
class Event(object):
"""Handle events from lambda and generate hooks out to publishers."""
def __init__(self, event):
"""[summary]
Arguments:
object {[type]} -- [an instance of the event class.]
event {[type]} -- [the event as ingested from the kinesis stream.]
subscriptions {[type]} -- [list of urls to post notifications to.]
"""
self.config = common.get_config()
self.event = event
self.secret_manager = secret.Manager()
self.access_token = None
def to_notification(self):
"""[summary]
Transform the instance of the event from the stream into a notification payload.
[return] JSON data structure to send using requests.
"""
logger.debug("An event was received", extra={"event": self.event})
updated_record = self.event.get("dynamodb")
operation = "foxy" # Just a place holder in case we have an unhandled event.
if self.event.get("eventName") == "INSERT":
operation = "create"
if self.event.get("eventName") == "MODIFY":
operation = "update"
if self.event.get("eventName") == "REMOVE":
operation = "delete"
if updated_record is not None:
            # Provided the event has the structure we expect, build the notification payload.
notification = {
"operation": operation,
"id": updated_record["Keys"]["id"]["S"],
"time": updated_record["ApproximateCreationDateTime"],
}
logger.debug("Notification generated.", extra={"notification": notification})
return notification
else:
logger.debug("No notification generated.")
return {}
def send(self, notification):
"""[summary]
Get the list of notification endpoints from the object constructor and send a POST with the json payload.
Arguments:
object {[type]} -- [an instance of the event class.]
object {[notification]} -- [A json payload that you would like to send to the RP.]
[return] Dictionary of status codes by publisher.
"""
# Not in-memory access token?
if not self.access_token:
# Load whatever is in our secrets
self.access_token_dict = self.secret_manager.secretmgr("az_access_token")
# Check if what we had in secrets is still valid!
# This includes 10s leeway for clock sync issues and 15min (900s) for max-lambda function time.
            # Since tokens are normally valid for 86400s (1 day) that should accommodate all cases. If these were to
            # be less than 15min for any reason, it would simply bypass the cache.
if expired(float(self.access_token_dict.get("exp", 0.0)), leeway=910):
logger.info("Access token has expired, refreshing")
authzero = self._get_authzero_client()
self.access_token_dict = authzero.exchange_for_access_token()
# Auth0 gives us the difference (expires_in) not a time stamp, so we need to calculate when the token
# expires.
self.access_token_dict["exp"] = time.time() + float(self.access_token_dict.get("expires_in", 60.0))
self.secret_manager.secretmgr_store("az_access_token", self.access_token_dict)
else:
logger.info("Re-using cached access token")
self.access_token = self.access_token_dict["access_token"]
if notification != {}:
rp_urls = self.config(
"rp_urls", namespace="cis", default="https://dinopark.k8s.dev.sso.allizom.org/events/update"
)
results = {}
for url in rp_urls.split(","):
result = self._notify_via_post(url, notification, self.access_token)
results[url] = result
return results
def _get_authzero_client(self):
authzero = secret.AuthZero(
client_id=self.secret_manager.secret("client_id"),
client_secret=self.secret_manager.secret("client_secret"),
api_identifier=self.config("api_identifier", namespace="cis", default="hook.dev.sso.allizom.org"),
authzero_tenant=self.config("authzero_tenant", namespace="cis", default="auth.mozilla.auth0.com"),
)
return authzero
def _notify_via_post(self, url, json_payload, access_token):
"""[summary]
Notify a single publisher of the user_id that was updated and return only the status code.
Arguments:
url {[type]} -- [the url of the publisher you woud like to notify.]
json_payload {[type]} -- [the event to send to the publisher.]
"""
try:
response = requests.post(
url, json=json_payload, headers={"authorization": "Bearer {}".format(access_token)}
)
return response.status_code
        except requests.exceptions.HTTPError:
            return "HTTPError"
        except requests.exceptions.ConnectionError:
            return "ConnectionError"
        except requests.exceptions.Timeout:
            return "Timeout"
        except requests.exceptions.RequestException:
            # RequestException is the base class of the handlers above, so it
            # must come last or it would shadow the more specific ones.
            return "Unknown"
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_logger/setup.py
|
1
|
1188
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
requirements = ["python-json-logger", "everett", "everett[ini]"]
test_requirements = ["pytest", "pytest-watch", "pytest-cov", "flake8", "flask", "flask_graphql", "flask_restful"]
setup_requirements = ["pytest-runner", "setuptools>=40.5.0"]
extras = {"test": test_requirements}
setup(
name="cis_logger",
version="0.0.1",
author="Andrew Krug",
author_email="[email protected]",
description="Mozilla IAM logger wrapper.",
long_description=long_description,
url="https://github.com/mozilla-iam/cis",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Operating System :: OS Independent",
],
install_requires=requirements,
license="Mozilla Public License 2.0",
include_package_data=True,
packages=find_packages(include=["cis_logger"]),
setup_requires=setup_requirements,
tests_require=test_requirements,
extras_require=extras,
test_suite="tests",
zip_safe=True,
)
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_crypto/cis_crypto/secret.py
|
1
|
5055
|
"""Class for following a default provider chain in the fetching of key material for sign/verify operations."""
import boto3
import json
import os
import logging
import time
from cis_crypto import common
from jose import jwk
from botocore.exceptions import ClientError
logger = logging.getLogger(__name__)
class Manager(object):
"""Top level manager object. Will instantiate the appropriate provider based on configuration."""
def __init__(self, provider_type):
self.provider_type = provider_type
def get_key(self, key_name):
provider = self._load_provider()
return provider.key(key_name)
def _load_provider(self):
logger.debug("Using secret manager provider type: {}".format(self.provider_type))
if self.provider_type.lower() == "file":
return FileProvider()
elif self.provider_type.lower() == "aws-ssm":
return AWSParameterstoreProvider()
else:
raise ValueError("The secret provider selected is not yet supported file|aws-ssm are currently available.")
class FileProvider(object):
"""Support loading key material from disk."""
def key(self, key_name):
"""Takes key_name returns bytes"""
config = common.get_config()
key_dir = config(
"secret_manager_file_path",
namespace="cis",
default=("{}/.mozilla-iam/keys/".format(os.path.expanduser("~"))),
)
file_name = "{}".format(key_name)
logger.debug("Secret manager file provider loading key file: {}/{}".format(key_dir, key_name))
        with open(os.path.join(key_dir, file_name), "rb") as fh:
            key_content = fh.read()
key_construct = jwk.construct(key_content, "RS256")
return key_construct
class AWSParameterstoreProvider(object):
"""Support loading secure strings from AWS parameter store."""
def __init__(self):
self.config = common.get_config()
self.region_name = self.config("secret_manager_ssm_region", namespace="cis", default="us-west-2")
self.boto_session = boto3.session.Session(region_name=self.region_name)
self.ssm_client = self.boto_session.client("ssm")
self._cache = {}
def key(self, key_name):
retries = 30
backoff = 1
result = None
if "key_construct" in self._cache:
logger.debug("Returning memory-cached version of the parameter key_construct")
return self._cache["key_construct"]
while result is None:
try:
ssm_namespace = self.config("secret_manager_ssm_path", namespace="cis", default="/iam")
logger.debug("Secret manager SSM provider loading key: {}/{}".format(ssm_namespace, key_name))
ssm_response = self.ssm_client.get_parameter(
Name="{}/{}".format(ssm_namespace, key_name), WithDecryption=True
)
result = ssm_response.get("Parameter")
except ClientError as e:
retries = retries - 1
backoff = backoff + 1
time.sleep(backoff)
logger.debug(
"Backing-off: fetch secret ({}) due to: {} retries {} backoff {}".format(
key_name, e, retries, backoff
)
)
if retries <= 0:
break
if result is None:
logger.error("Failed to fetch secret ({}) due to: retries {} backoff {}".format(key_name, retries, backoff))
try:
key_dict = json.loads(result.get("Value"))
key_construct = jwk.construct(key_dict, "RS256")
except json.decoder.JSONDecodeError:
key_construct = jwk.construct(result.get("Value"), "RS256")
self._cache["key_construct"] = key_construct
return key_construct
def uuid_salt(self):
retries = 30
backoff = 1
result = None
if "uuid_salt" in self._cache:
logger.debug("Returning memory-cached version of uuid_salt")
return self._cache["uuid_salt"]
while result is None:
try:
ssm_path = self.config("secret_manager_ssm_uuid_salt", namespace="cis", default="/iam")
logger.debug("Secret manager SSM provider loading uuid_salt: {}".format(ssm_path))
ssm_response = self.ssm_client.get_parameter(Name=ssm_path, WithDecryption=True)
result = ssm_response.get("Parameter").get("Value")
except ClientError as e:
retries = retries - 1
backoff = backoff + 1
logger.debug("Backing-off: fetch secret due to: {} retries {} backoff {}".format(e, retries, backoff))
time.sleep(backoff)
if retries <= 0:
break
if result is None:
logger.error("Failed to fetch uuid_salt due to: retries {} backoff {}".format(retries, backoff))
self._cache["uuid_salt"] = result
return result
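# A short usage sketch (illustrative): the Manager picks a provider by name
# and returns a jose jwk key construct. The key file name is a placeholder.
#
#     manager = Manager(provider_type="file")
#     key_construct = manager.get_key("fake-publisher-key_0.priv.pem")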
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_profile_retrieval_service/cis_profile_retrieval_service/schema.py
|
1
|
1800
|
import json
import graphene
import cis_profile.graphene
from cis_identity_vault.models import user
from cis_profile_retrieval_service.common import get_table_resource
def is_json(payload):
"""Check if a payload is valid JSON."""
try:
json.loads(payload)
except (TypeError, ValueError):
return False
else:
return True
class Query(graphene.ObjectType):
"""GraphQL Query class for the V2 Profiles."""
profiles = graphene.List(cis_profile.graphene.Profile, primaryEmail=graphene.String(required=False))
profile = graphene.Field(cis_profile.graphene.Profile, userId=graphene.String(required=True))
def resolve_profiles(self, info, **kwargs):
"""GraphQL resolver for the profiles attribute."""
table = get_table_resource()
vault = user.Profile(table)
profiles = []
if kwargs.get("primaryEmail"):
search = vault.find_by_email(kwargs.get("primaryEmail"))
if len(search.get("Items")) > 0:
for profile in search.get("Items"):
                for profile in search.get("Items"):
                    profiles.append(json.loads(profile.get("profile")))
        else:
            for vault_profile in vault.all:
                profiles.append(json.loads(vault_profile.get("profile")))
        return profiles
def resolve_profile(self, info, **kwargs):
"""GraphQL resolver for a single profile."""
table = get_table_resource()
vault = user.Profile(table)
if kwargs.get("userId"):
search = vault.find_by_id(kwargs.get("userId"))
if len(search.get("Items")) > 0:
resp = search["Items"][0]["profile"]
else:
resp = json.dumps({})
return resp
class AuthorizationMiddleware:
def resolve(self, next, root, info, **kwargs):
return next(root, info, **kwargs)
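# An illustrative GraphQL query this schema can resolve; the inner field
# names depend on cis_profile.graphene.Profile and are assumptions here.
#
#     query {
#         profile(userId: "ad|Mozilla-LDAP|dummy") {
#             userId { value }
#         }
#     }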
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_profile/tests/test_well_known.py
|
1
|
1091
|
from cis_profile.common import WellKnown
from cis_profile.profile import User
import os
class Test_WellKnown(object):
def test_wellknown_file_force(self):
wk = WellKnown(always_use_local_file=True)
data = wk.get_well_known()
assert isinstance(data, dict)
assert isinstance(data.get("api"), dict)
def test_wellknown_retrieve(self):
wk = WellKnown()
data = wk.get_well_known()
assert isinstance(data, dict)
assert isinstance(data.get("api"), dict)
def test_schema_retrieve(self):
wk = WellKnown()
data = wk.get_schema()
assert isinstance(data, dict)
def test_rules_retrieve(self):
wk = WellKnown()
data = wk.get_publisher_rules()
assert isinstance(data, dict)
assert isinstance(data.get("create"), dict)
def test_profile_env(self):
os.environ["CIS_DISCOVERY_URL"] = "https://auth.allizom.org/.well-known/mozilla-iam"
u = User()
assert u._User__well_known.discovery_url == "https://auth.allizom.org/.well-known/mozilla-iam"
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_crypto/setup.py
|
1
|
1388
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
requirements = [
"python-jose[cryptography]",
"cryptography",
"everett",
"everett[ini]",
"configobj",
"boto3",
"boto",
"botocore",
"requests",
"pyaml",
]
setup_requirements = ["pytest-runner", "setuptools>=40.5.0"]
test_requirements = ["pytest", "pytest-watch", "pytest-cov", "pytest-mock", "moto", "mock", "flake8", "cis_profile"]
extras = {"test": test_requirements}
setup(
name="cis_crypto",
version="0.0.1",
author="Andrew Krug",
author_email="[email protected]",
description="Per attribute signature system for jwks sign-verify in mozilla-iam.",
long_description=long_description,
url="https://github.com/mozilla-iam/cis",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Mozilla Public License",
"Operating System :: OS Independent",
],
install_requires=requirements,
license="Mozilla Public License 2.0",
include_package_data=True,
packages=find_packages(include=["cis_crypto", "bin"]),
scripts=["bin/cis_crypto"],
setup_requires=setup_requirements,
test_suite="tests",
tests_require=test_requirements,
extras_require=extras,
zip_safe=False,
)
|
mpl-2.0
|
mozilla-iam/cis
|
python-modules/cis_change_service/tests/test_api.py
|
1
|
18809
|
import json
import logging
import mock
import os
import random
import subprocess
import string
import cis_profile
from cis_profile import common
from cis_profile import FakeUser
from cis_profile.fake_profile import FakeProfileConfig
from cis_profile import profile
from datetime import datetime
from datetime import timedelta
from tests.fake_auth0 import FakeBearer
from tests.fake_auth0 import json_form_of_pk
logging.basicConfig(level=logging.INFO, format="%(asctime)s:%(levelname)s:%(name)s:%(message)s")
logging.getLogger("boto").setLevel(logging.CRITICAL)
logging.getLogger("boto3").setLevel(logging.CRITICAL)
logging.getLogger("botocore").setLevel(logging.CRITICAL)
logging.getLogger("cis_profile.profile").setLevel(logging.DEBUG)
logger = logging.getLogger(__name__)
def get_complex_structures():
return ["staff_information", "access_information", "identities", "schema"]
def ensure_appropriate_publishers_and_sign(fake_profile, publisher_rules, condition):
os.environ["CIS_SECRET_MANAGER"] = "file"
os.environ["CIS_SIGNING_KEY_NAME"] = "signing_key_name=fake-publisher-key_0.priv.pem"
temp_profile = fake_profile
complex_structures = get_complex_structures()
for attr in publisher_rules[condition]:
if attr == "primary_username" and temp_profile[attr]["value"] == "None":
temp_profile[attr]["value"] = "".join(
[random.choice(string.ascii_letters + string.digits) for n in range(32)]
)
if attr not in complex_structures:
successful_random_publisher = random.choice(publisher_rules[condition][attr])
temp_profile[attr]["signature"]["publisher"]["name"] = successful_random_publisher
u = profile.User(user_structure_json=temp_profile)
# Don't sign NULL attributes or invalid publishers
if u._attribute_value_set(temp_profile[attr], strict=True) and (
temp_profile[attr]["signature"]["publisher"]["name"] == successful_random_publisher
):
u.sign_attribute(attr, successful_random_publisher)
temp_profile = u.as_dict()
else:
if attr != "schema" and attr in complex_structures:
for k in temp_profile[attr]:
try:
successful_random_publisher = random.choice(publisher_rules[condition][attr])
except KeyError:
successful_random_publisher = random.choice(publisher_rules[condition][attr][k])
temp_profile[attr][k]["signature"]["publisher"]["name"] = successful_random_publisher
u = profile.User(user_structure_json=temp_profile)
attribute = "{}.{}".format(attr, k)
# Don't sign NULL attributes or invalid publishers
if u._attribute_value_set(temp_profile[attr][k], strict=True) and (
temp_profile[attr][k]["signature"]["publisher"]["name"] == successful_random_publisher
):
u.sign_attribute(attribute, successful_random_publisher)
temp_profile = u.as_dict()
return profile.User(user_structure_json=temp_profile)
class TestAPI(object):
def setup(self):
self.dynalite_port = str(random.randint(32200, 32300))
os.environ["CIS_CONFIG_INI"] = "tests/mozilla-cis.ini"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_ENVIRONMENT"] = "local"
os.environ["CIS_DYNALITE_PORT"] = self.dynalite_port
os.environ["CIS_REGION_NAME"] = "us-west-2"
os.environ["AWS_ACCESS_KEY_ID"] = "foo"
os.environ["AWS_SECRET_ACCESS_KEY"] = "bar"
from cis_identity_vault import vault
from cis_change_service.common import get_config
self.patcher_salt = mock.patch("cis_crypto.secret.AWSParameterstoreProvider.uuid_salt")
self.mock_salt = self.patcher_salt.start()
config = get_config()
os.environ["CIS_DYNALITE_PORT"] = str(random.randint(32300, 32400))
self.dynalite_port = config("dynalite_port", namespace="cis")
self.dynaliteprocess = subprocess.Popen(
[
"/usr/sbin/java",
"-Djava.library.path=/opt/dynamodb_local/DynamoDBLocal_lib",
"-jar",
"/opt/dynamodb_local/DynamoDBLocal.jar",
"-inMemory",
"-port",
self.dynalite_port,
],
preexec_fn=os.setsid,
)
v = vault.IdentityVault()
v.connect()
v.create()
user_profile = FakeUser(config=FakeProfileConfig().minimal())
self.user_profile = user_profile.as_json()
from cis_change_service import api
api.app.testing = True
self.app = api.app.test_client()
self.publisher_rules = common.WellKnown().get_publisher_rules()
self.complex_structures = get_complex_structures()
def test_index_exists(self):
result = self.app.get("/v2", follow_redirects=True)
assert result.status_code == 200
@mock.patch("cis_change_service.idp.get_jwks")
def test_stream_bypass_publishing_mode_it_should_succeed(self, fake_jwks):
os.environ["CIS_STREAM_BYPASS"] = "true"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_ENVIRONMENT"] = "local"
os.environ["CIS_DYNALITE_PORT"] = self.dynalite_port
os.environ["CIS_REGION_NAME"] = "us-west-2"
from cis_change_service import api
f = FakeBearer()
fake_jwks.return_value = json_form_of_pk
token = f.generate_bearer_without_scope()
api.app.testing = True
self.app = api.app.test_client()
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(self.user_profile),
content_type="application/json",
follow_redirects=True,
)
json.loads(result.get_data())
assert result.status_code == 200
@mock.patch("cis_change_service.idp.get_jwks")
def test_change_endpoint_fails_with_invalid_token_and_jwt_validation_false(self, fake_jwks):
os.environ["CIS_CONFIG_INI"] = "tests/mozilla-cis.ini"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_ENVIRONMENT"] = "local"
os.environ["CIS_DYNALITE_PORT"] = self.dynalite_port
os.environ["CIS_REGION_NAME"] = "us-west-2"
from cis_change_service import api
os.environ["CIS_JWT_VALIDATION"] = "false"
f = FakeBearer()
bad_claims = {
"iss": "https://auth-dev.mozilla.auth0.com/",
"sub": "mc1l0G4sJI2eQfdWxqgVNcRAD9EAgHib@clients",
"aud": "https://hacks",
"iat": (datetime.utcnow() - timedelta(seconds=3100)).strftime("%s"),
"exp": (datetime.utcnow() - timedelta(seconds=3100)).strftime("%s"),
"scope": "read:allthething",
"gty": "client-credentials",
}
fake_jwks.return_value = json_form_of_pk
token = f.generate_bearer_with_scope("read:profile", bad_claims)
api.app.testing = True
self.app = api.app.test_client()
result = self.app.get(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(self.user_profile),
content_type="application/json",
follow_redirects=True,
)
assert result.status_code == 200
@mock.patch("cis_change_service.idp.get_jwks")
def test_wrong_publisher(self, fake_jwks):
"""
        This verifies a wrong publisher can't update:
        it creates a valid user, then wrongly modifies an attribute it's not allowed to.
"""
os.environ["CIS_CONFIG_INI"] = "tests/mozilla-cis-verify.ini"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_ENVIRONMENT"] = "local"
os.environ["CIS_DYNALITE_PORT"] = self.dynalite_port
os.environ["CIS_REGION_NAME"] = "us-east-1"
os.environ["AWS_ACCESS_KEY_ID"] = "foo"
os.environ["AWS_SECRET_ACCESS_KEY"] = "bar"
os.environ["DEFAULT_AWS_REGION"] = "us-east-1"
os.environ["CIS_VERIFY_SIGNATURES"] = "true"
os.environ["CIS_VERIFY_PUBLISHERS"] = "true"
from cis_change_service import api
fake_new_user = FakeUser(config=FakeProfileConfig().minimal().no_display())
# Create a brand new user
patched_user_profile = ensure_appropriate_publishers_and_sign(
fake_new_user.as_dict(), self.publisher_rules, "create"
)
# Ensure a first_name is set as we'll use that for testing
patched_user_profile.first_name.value = "test"
patched_user_profile.first_name.signature.publisher.name = "ldap"
patched_user_profile.first_name.metadata.display = "public"
patched_user_profile.sign_attribute("first_name", "ldap")
f = FakeBearer()
fake_jwks.return_value = json_form_of_pk
token = f.generate_bearer_without_scope()
api.app.testing = True
self.app = api.app.test_client()
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(patched_user_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert result.status_code == 200
assert response["condition"] == "create"
# sign first_name again but with wrong publisher (but same value as before)
new_user = cis_profile.User(user_id=patched_user_profile.user_id.value)
new_user.first_name = patched_user_profile.first_name
new_user.sign_attribute("first_name", "access_provider")
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(new_user.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert response["status_code"] == 202
# sign first_name again but with wrong publisher and different display (but same value as before)
new_user = cis_profile.User(user_id=patched_user_profile.user_id.value)
new_user.first_name = patched_user_profile.first_name
new_user.first_name.metadata.display = "staff"
new_user.sign_attribute("first_name", "access_provider")
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(new_user.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert response["code"] == "invalid_publisher"
# sign first_name again but with wrong publisher and wrong value (it should fail)
new_user.first_name.value = "new-test"
new_user.sign_attribute("first_name", "access_provider")
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(new_user.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert result.status_code != 200
@mock.patch("cis_change_service.idp.get_jwks")
def test_partial_update_with_dynamo_down_it_should_fail(self, fake_jwks):
os.environ["CIS_CONFIG_INI"] = "tests/mozilla-cis.ini"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_ENVIRONMENT"] = "local"
os.environ["CIS_DYNALITE_PORT"] = self.dynalite_port
os.environ["CIS_REGION_NAME"] = "us-east-1"
os.environ["AWS_ACCESS_KEY_ID"] = "foo"
os.environ["AWS_SECRET_ACCESS_KEY"] = "bar"
os.environ["DEFAULT_AWS_REGION"] = "us-east-1"
from cis_change_service import api
fake_new_user = FakeUser(config=FakeProfileConfig().minimal())
# Create a brand new user
patched_user_profile = ensure_appropriate_publishers_and_sign(
fake_new_user.as_dict(), self.publisher_rules, "create"
)
f = FakeBearer()
fake_jwks.return_value = json_form_of_pk
token = f.generate_bearer_without_scope()
api.app.testing = True
self.app = api.app.test_client()
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(patched_user_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert result.status_code == 200
assert response["condition"] == "create"
logger.info("A stub user has been created and verified to exist.")
os.environ["CIS_DYNALITE_PORT"] = "31337" # bad port
logger.info("Attempting failing partial update.")
null_profile = profile.User(user_structure_json=None)
null_profile.alternative_name.value = "iamanewpreferredlastname"
null_profile.sign_attribute("alternative_name", "mozilliansorg")
null_profile.user_id.value = "ad|wrong|LDAP"
null_profile.active.value = True
null_profile.sign_attribute("active", "access_provider")
try:
result = self.app.post(
"/v2/user?user_id={}".format("mismatching_user_id"),
headers={"Authorization": "Bearer " + token},
data=json.dumps(null_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
        except Exception as e:
            logger.info(f"Exception was correctly raised because DynamoDB was unavailable: {e}")
        else:
            # No exception surfaced: the write must still have failed server-side.
            # Checked here rather than inside the try block so an AssertionError
            # is not swallowed by the broad except above.
            assert result.status_code == 500
@mock.patch("cis_change_service.idp.get_jwks")
def test_partial_update_it_should_fail(self, fake_jwks):
os.environ["CIS_CONFIG_INI"] = "tests/mozilla-cis.ini"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_ENVIRONMENT"] = "local"
os.environ["CIS_DYNALITE_PORT"] = self.dynalite_port
os.environ["CIS_REGION_NAME"] = "us-east-1"
os.environ["AWS_ACCESS_KEY_ID"] = "foo"
os.environ["AWS_SECRET_ACCESS_KEY"] = "bar"
os.environ["DEFAULT_AWS_REGION"] = "us-east-1"
from cis_change_service import api
fake_new_user = FakeUser(config=FakeProfileConfig().minimal())
# Create a brand new user
patched_user_profile = ensure_appropriate_publishers_and_sign(
fake_new_user.as_dict(), self.publisher_rules, "create"
)
f = FakeBearer()
fake_jwks.return_value = json_form_of_pk
token = f.generate_bearer_without_scope()
api.app.testing = True
self.app = api.app.test_client()
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(patched_user_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert result.status_code == 200
assert response["condition"] == "create"
logger.info("A stub user has been created and verified to exist.")
logger.info("Attempting failing partial update.")
null_profile = profile.User(user_structure_json=None)
null_profile.alternative_name.value = "iamanewpreferredlastname"
null_profile.sign_attribute("alternative_name", "mozilliansorg")
null_profile.user_id.value = "ad|wrong|LDAP"
null_profile.active.value = True
null_profile.sign_attribute("active", "access_provider")
result = self.app.post(
"/v2/user?user_id={}".format("mismatching_user_id"),
headers={"Authorization": "Bearer " + token},
data=json.dumps(null_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert result.status_code == 400
@mock.patch("cis_change_service.idp.get_jwks")
def test_partial_update_it_should_succeed(self, fake_jwks):
os.environ["CIS_STREAM_BYPASS"] = "true"
os.environ["AWS_XRAY_SDK_ENABLED"] = "false"
os.environ["CIS_VERIFY_PUBLISHERS"] = "true"
from cis_change_service import api
fake_new_user = FakeUser(config=FakeProfileConfig().minimal())
# Create a brand new user
patched_user_profile = ensure_appropriate_publishers_and_sign(
fake_new_user.as_dict(), self.publisher_rules, "create"
)
f = FakeBearer()
fake_jwks.return_value = json_form_of_pk
token = f.generate_bearer_without_scope()
api.app.testing = True
self.app = api.app.test_client()
result = self.app.post(
"/v2/user",
headers={"Authorization": "Bearer " + token},
data=json.dumps(patched_user_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
response = json.loads(result.get_data())
assert result.status_code == 200
assert response["condition"] == "create"
logger.info("A stub user has been created and verified to exist.")
logger.info("Attempting partial update.")
# Now let's try a partial update :)
null_profile = profile.User(user_structure_json=None)
null_profile.active.value = True
null_profile.sign_attribute("active", "access_provider")
null_profile.last_name.value = "iamanewpreferredlastname"
null_profile.sign_attribute("last_name", "mozilliansorg")
result = self.app.post(
"/v2/user?user_id={}".format(patched_user_profile.user_id.value),
headers={"Authorization": "Bearer " + token},
data=json.dumps(null_profile.as_json()),
content_type="application/json",
follow_redirects=True,
)
logger.info(result.get_data())
response = json.loads(result.get_data())
assert result.status_code == 200
assert response["condition"] == "update"
def teardown(self):
os.killpg(os.getpgid(self.dynaliteprocess.pid), 15)
self.patcher_salt.stop()
|
mpl-2.0
|
ibm-watson-iot/iot-python
|
test/test_api_registry_devices_ext.py
|
2
|
3189
|
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
import uuid
import time
from datetime import datetime
import pytest
import testUtils
import wiotp.sdk.device
from wiotp.sdk.api.registry.devices import DeviceUid, DeviceInfo, DeviceCreateRequest, DeviceLocation, LogEntry
from wiotp.sdk.exceptions import ApiException
class TestRegistryDevices(testUtils.AbstractTest):
def testDeviceLocationGetAndUpdate(self, deviceType, device):
assert device.deviceId in deviceType.devices
locationBefore = device.getLocation()
assert locationBefore is None
device.setLocation({"latitude": 50, "longitude": 60})
locationAfter = device.getLocation()
assert locationAfter.updatedDateTime is not None
assert locationAfter.measuredDateTime is not None
assert isinstance(locationAfter.updatedDateTime, datetime)
assert isinstance(locationAfter.measuredDateTime, datetime)
assert locationAfter.latitude == 50
assert locationAfter.longitude == 60
device.setLocation(DeviceLocation(latitude=80, longitude=75))
locationAfter = device.getLocation()
assert locationAfter.latitude == 80
assert locationAfter.longitude == 75
def testDeviceLocationInvalid(self, deviceType, device):
assert device.deviceId in deviceType.devices
locationBefore = device.getLocation()
assert locationBefore is None
        # Latitude 100 is outside the valid range, so the API must reject the update
        with pytest.raises(ApiException) as excInfo:
            device.setLocation(DeviceLocation(latitude=100, longitude=120))
        assert excInfo.value.id == "CUDHT0300I"
        assert len(excInfo.value.violations) == 1
def testDeviceMgmt(self, deviceType, device):
assert device.deviceId in deviceType.devices
mgmtInfo = device.getMgmt()
assert mgmtInfo is None
def testDeviceConnectionLogs(self, deviceType, device, authToken):
assert device.deviceId in deviceType.devices
options = {
"identity": {"orgId": self.ORG_ID, "typeId": device.typeId, "deviceId": device.deviceId},
"auth": {"token": authToken},
}
deviceClient = wiotp.sdk.device.DeviceClient(options)
deviceClient.connect()
time.sleep(10)
deviceClient.disconnect()
deviceClient.connect()
time.sleep(10)
deviceClient.disconnect()
        # Allow 30 seconds for the logs to make it through (a polling alternative is sketched after this class)
time.sleep(30)
connLogs = device.getConnectionLogs()
# There may be more than 2 entries due to previous connection attempts if we re-used a device ID. But there should be at least two!
assert len(connLogs) >= 2
for entry in connLogs:
assert isinstance(entry, LogEntry)
assert isinstance(entry.timestamp, datetime)
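# A hedged alternative to the fixed 30-second sleep in testDeviceConnectionLogs:
# poll for the expected number of log entries with a timeout. The helper name and
# parameters are illustrative only, not part of this test suite.
#
#   def waitForConnLogs(device, minEntries, timeout=60, interval=5):
#       deadline = time.time() + timeout
#       while time.time() < deadline:
#           logs = device.getConnectionLogs()
#           if len(logs) >= minEntries:
#               return logs
#           time.sleep(interval)
#       raise TimeoutError("connection logs did not appear within %ss" % timeout)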
|
epl-1.0
|
ibm-watson-iot/iot-python
|
src/wiotp/sdk/device/client.py
|
2
|
4102
|
# *****************************************************************************
# Copyright (c) 2014, 2018 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
from datetime import datetime
import json
import logging
import threading
import paho.mqtt.client as paho
import pytz
from wiotp.sdk import (
AbstractClient,
ConfigurationException,
ConnectionException,
MissingMessageEncoderException,
InvalidEventException,
)
from wiotp.sdk.device.command import Command
from wiotp.sdk.device.config import DeviceClientConfig
class DeviceClient(AbstractClient):
"""
    Extends #wiotp.sdk.AbstractClient to implement a device client supporting
messaging over MQTT
# Parameters
options (dict): Configuration options for the client
logHandlers (list<logging.Handler>): Log handlers to configure. Defaults to `None`,
which will result in a default log handler being created.
"""
_COMMAND_TOPIC = "iot-2/cmd/+/fmt/+"
def __init__(self, config, logHandlers=None):
self._config = DeviceClientConfig(**config)
AbstractClient.__init__(
self,
domain=self._config.domain,
organization=self._config.orgId,
clientId=self._config.clientId,
username=self._config.username,
password=self._config.password,
port=self._config.port,
transport=self._config.transport,
cleanStart=self._config.cleanStart,
sessionExpiry=self._config.sessionExpiry,
keepAlive=self._config.keepAlive,
caFile=self._config.caFile,
logLevel=self._config.logLevel,
logHandlers=logHandlers,
)
        # Initialize user supplied callback
        self.commandCallback = None
        # Command handling and the startup subscription are not supported on QuickStart
        if not self._config.isQuickstart():
            self.client.message_callback_add(self._COMMAND_TOPIC, self._onCommand)
            self._subscriptions[self._COMMAND_TOPIC] = 1
def publishEvent(self, eventId, msgFormat, data, qos=0, onPublish=None):
"""
Publish an event to Watson IoT Platform.
# Parameters
eventId (string): Name of this event
msgFormat (string): Format of the data for this event
data (dict): Data for this event
qos (int): MQTT quality of service level to use (`0`, `1`, or `2`)
onPublish(function): A function that will be called when receipt
of the publication is confirmed.
# Callback and QoS
The use of the optional #onPublish function has different implications depending
on the level of qos used to publish the event:
- qos 0: the client has asynchronously begun to send the event
- qos 1 and 2: the client has confirmation of delivery from the platform
"""
topic = "iot-2/evt/{eventId}/fmt/{msgFormat}".format(eventId=eventId, msgFormat=msgFormat)
return self._publishEvent(topic, eventId, msgFormat, data, qos, onPublish)
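    # A hedged usage sketch for publishEvent: at QoS 1 the onPublish callback
    # fires once the platform confirms delivery. The zero-argument callback form
    # is an assumption; the actual signature is defined by AbstractClient, which
    # is not shown in this file.
    #
    #   def delivered():
    #       print("event delivery confirmed")
    #
    #   client.publishEvent("status", "json", {"temperature": 21.5}, qos=1, onPublish=delivered)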
def _onCommand(self, client, userdata, pahoMessage):
"""
Internal callback for device command messages, parses source device from topic string and
passes the information on to the registered device command callback
"""
try:
command = Command(pahoMessage, self._messageCodecs)
except InvalidEventException as e:
self.logger.critical(str(e))
else:
self.logger.debug("Received command '%s'" % (command.commandId))
if self.commandCallback:
self.commandCallback(command)
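# A hedged end-to-end sketch (illustrative, not part of the SDK source): wire up
# a command callback before connecting, then publish an event. `options` follows
# the identity/auth layout shown in the class docstring above.
#
#   def handleCommand(cmd):
#       print("Received command: %s" % cmd.commandId)
#
#   client = DeviceClient(options)
#   client.commandCallback = handleCommand
#   client.connect()
#   client.publishEvent("status", "json", {"ok": True}, qos=1)
#   client.disconnect()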
|
epl-1.0
|
ibm-watson-iot/iot-python
|
test/test_api_registry_devicetypes.py
|
2
|
6161
|
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
import uuid
import pytest
import testUtils
from wiotp.sdk.api.registry.devices import DeviceInfo
from wiotp.sdk.exceptions import ApiException
class TestRegistryDevicetypes(testUtils.AbstractTest):
# =========================================================================
# Device Type tests
# =========================================================================
def testDeviceTypeExistsCheck(self, deviceType):
        assert deviceType.id in self.appClient.registry.devicetypes
        assert "doesntexist" not in self.appClient.registry.devicetypes
def testGetDeviceType(self, deviceType):
retrievedDeviceType = self.appClient.registry.devicetypes[deviceType.id]
assert retrievedDeviceType.id == deviceType.id
assert retrievedDeviceType.classId == "Device"
def testGetDeviceTypeThatDoesntExist(self):
with pytest.raises(Exception):
self.appClient.registry.devicetypes["doesntexist"]
def testUnsupportedCreateUpdate(self):
with pytest.raises(Exception):
self.appClient.registry.devicetypes["d:hldtxx:vm:iot-test-06"] = {"foo", "bar"}
def testListDeviceTypes(self, deviceType):
        # Iterate the registry, capping at 10 device types to keep the test fast
        count = 0
        for _ in self.appClient.registry.devicetypes:
            count += 1
            if count > 10:
                break
# DeviceTypeDescription test
def testCreateDeviceType(self):
typeId = str(uuid.uuid4())
myDeviceType = self.appClient.registry.devicetypes.create({"id": typeId, "description": "This is a test"})
myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
assert myDeviceTypeRetrieved.id == typeId
assert myDeviceTypeRetrieved.description == "This is a test"
del self.appClient.registry.devicetypes[typeId]
def testCreateDeviceTypeNone(self):
typeId = str(uuid.uuid4())
myDeviceType = self.appClient.registry.devicetypes.create({"id": typeId, "description": None})
myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
assert myDeviceTypeRetrieved.id == typeId
        assert myDeviceTypeRetrieved.description is None
del self.appClient.registry.devicetypes[typeId]
# Metadata test
def testCreateDeviceMetadata(self):
typeId = str(uuid.uuid4())
myDeviceType = self.appClient.registry.devicetypes.create(
{"id": typeId, "description": "This is still a test", "metadata": {"test": "test"}}
)
myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
assert myDeviceTypeRetrieved.id == typeId
assert myDeviceTypeRetrieved.description == "This is still a test"
assert myDeviceTypeRetrieved.metadata == {"test": "test"}
del self.appClient.registry.devicetypes[typeId]
def testCreateDeviceMetadataNone(self):
typeId = str(uuid.uuid4())
myDeviceType = self.appClient.registry.devicetypes.create(
{"id": typeId, "description": "This is still a test", "metadata": None}
)
myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
assert myDeviceTypeRetrieved.id == typeId
assert myDeviceTypeRetrieved.description == "This is still a test"
        assert myDeviceTypeRetrieved.metadata is None
del self.appClient.registry.devicetypes[typeId]
def testUpdateDeviceType(self, deviceType):
self.appClient.registry.devicetypes.update(deviceType.id, description="This is still a test")
updatedDeviceType = self.appClient.registry.devicetypes[deviceType.id]
assert updatedDeviceType.description == "This is still a test"
def testUpdateDeviceInfo(self, deviceType):
self.appClient.registry.devicetypes.update(deviceType.id, deviceInfo=DeviceInfo(serialNumber="111"))
updatedDeviceType = self.appClient.registry.devicetypes[deviceType.id]
assert updatedDeviceType.deviceInfo.serialNumber == "111"
# =========================================================================
# Device under DeviceType tests
# =========================================================================
def testDeviceExistsCheck(self, deviceType, device):
        assert device.deviceId in deviceType.devices
        assert "wheredidyago" not in deviceType.devices
def testGetDeviceFromDeviceType(self, deviceType, device):
        myDevice = self.appClient.registry.devicetypes[deviceType.id].devices[device.deviceId]
        assert myDevice.deviceId == device.deviceId
def testListDevicesFromDeviceType(self, deviceType, device):
        # List the device type's devices, capping the iteration at 10 entries
        count = 0
        for _ in deviceType.devices:
            count += 1
            if count > 10:
                break
    def testCreateDeviceTypeInvalidPayload(self):
        # Invalid payload: a bare int instead of a dict should be rejected
        with pytest.raises(ApiException):
            self.appClient.registry.devicetypes.create(1)
    def testUpdateDeviceTypeInvalidPayload(self):
        # Invalid payload: updating with no data should be rejected
        with pytest.raises(ApiException):
            self.appClient.registry.devicetypes.update(None)
def testDeleteTypeId(self, device, deviceType):
typeId = str(uuid.uuid4())
self.appClient.registry.devicetypes.create(
{"id": typeId, "description": "This is still a test", "metadata": {"test": "test"}}
)
self.appClient.registry.devicetypes.delete(typeId)
        assert typeId not in self.appClient.registry.devicetypes
|
epl-1.0
|
ibm-watson-iot/iot-python
|
test/test_api_state_logical_interfaces.py
|
2
|
7820
|
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
#
import uuid
from datetime import datetime
import testUtils
import time
import pytest
from wiotp.sdk.exceptions import ApiException
import string
import json
@testUtils.oneJobOnlyTest
class TestLogicalInterfaces(testUtils.AbstractTest):
testSchemaName = "python-api-test-li-schema"
testLISchema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"title": "Environment Sensor Schema",
"properties": {
"temperature": {
"description": "temperature in degrees Celsius",
"type": "number",
"minimum": -237.15,
"default": 0.0,
},
"humidity": {"description": "relative humidity (%)", "type": "number", "minimum": 0.0, "default": 0.0},
"publishTimestamp": {"description": "publishTimestamp", "type": "number", "minimum": 0.0, "default": 0.0},
},
"required": ["temperature", "humidity", "publishTimestamp"],
}
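    # For reference, a minimal event payload satisfying this schema's required
    # fields (values are illustrative only):
    #
    #   {"temperature": 21.5, "humidity": 40.0, "publishTimestamp": 1546300800.0}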
testLogicalInterfaceName = "python-api-test-logicalInterface"
updatedLogicalInterfaceName = "python-api-test-logicalInterface-updated"
# =========================================================================
# Set up services
# =========================================================================
def testCleanup(self):
for li in self.appClient.state.draft.logicalInterfaces:
if li.name in (
TestLogicalInterfaces.testLogicalInterfaceName,
TestLogicalInterfaces.updatedLogicalInterfaceName,
):
# print("Deleting old test schema instance: %s" % (a))
del self.appClient.state.draft.logicalInterfaces[li.id]
for s in self.appClient.state.draft.schemas:
if s.name == TestLogicalInterfaces.testSchemaName:
del self.appClient.state.draft.schemas[s.id]
def checkLI(self, logicalInterface, name, description, schemaId, version, alias):
assert logicalInterface.name == name
assert logicalInterface.description == description
assert logicalInterface.schemaId == schemaId
assert logicalInterface.version == version
assert logicalInterface.alias == alias
assert isinstance(logicalInterface.created, datetime)
assert isinstance(logicalInterface.createdBy, str)
assert isinstance(logicalInterface.updated, datetime)
assert isinstance(logicalInterface.updatedBy, str)
def doesSchemaNameExist(self, name):
for a in self.appClient.state.draft.schemas.find({"name": name}):
if a.name == name:
return True
return False
def doesLINameExist(self, name):
for li in self.appClient.state.draft.logicalInterfaces.find({"name": name}):
if li.name == name:
return True
return False
def createSchema(self, name, schemaFileName, schemaContents, description):
jsonSchemaContents = json.dumps(schemaContents)
createdSchema = self.appClient.state.draft.schemas.create(name, schemaFileName, jsonSchemaContents, description)
return createdSchema
def createAndCheckLI(self, name, description, schemaId, version, alias):
createdLI = self.appClient.state.draft.logicalInterfaces.create(
{"name": name, "description": description, "schemaId": schemaId, "version": version, "alias": alias}
)
self.checkLI(createdLI, name, description, schemaId, version, alias)
# now actively refetch the LI to check it is stored
        fetchedLI = self.appClient.state.draft.logicalInterfaces[createdLI.id]
assert createdLI == fetchedLI
return createdLI
def testLogicalInterfaceCRUD(self):
        test_schema_name = TestLogicalInterfaces.testSchemaName
        assert not self.doesSchemaNameExist(test_schema_name)
        testLIName = TestLogicalInterfaces.testLogicalInterfaceName
        assert not self.doesLINameExist(testLIName)
# Create a schema
createdSchema = self.createSchema(
test_schema_name, "liSchema.json", TestLogicalInterfaces.testLISchema, "Test schema description"
)
# Create a Logical Interface
createdLI = self.createAndCheckLI(
testLIName, "Test Logical Interface description", createdSchema.id, "draft", "alias"
)
# Can we search for it
        assert self.doesLINameExist(testLIName)
# Update the LI
updated_li_name = TestLogicalInterfaces.updatedLogicalInterfaceName
updatedLI = self.appClient.state.draft.logicalInterfaces.update(
createdLI.id,
{
"id": createdLI.id,
"name": updated_li_name,
"description": "Test LI updated description",
"schemaId": createdSchema.id,
"version": "draft",
"alias": "test",
},
)
self.checkLI(updatedLI, updated_li_name, "Test LI updated description", createdSchema.id, "draft", "test")
# Delete the LI
del self.appClient.state.draft.logicalInterfaces[createdLI.id]
# It should be gone
        assert not self.doesLINameExist(testLIName)
# Delete the schema
del self.appClient.state.draft.schemas[createdSchema.id]
# It should be gone
        assert not self.doesSchemaNameExist(test_schema_name)
def testLogicalInterfaceActivation(self):
        test_schema_name = TestLogicalInterfaces.testSchemaName
        assert not self.doesSchemaNameExist(test_schema_name)
        testLIName = TestLogicalInterfaces.testLogicalInterfaceName
        assert not self.doesLINameExist(testLIName)
# Create a schema
createdSchema = self.createSchema(
test_schema_name, "liSchema.json", TestLogicalInterfaces.testLISchema, "Test schema description"
)
# Create a Logical Interface
createdLI = self.createAndCheckLI(
testLIName, "Test Logical Interface description", createdSchema.id, "draft", "alias"
)
# Can we search for it
        assert self.doesLINameExist(testLIName)
        # Validate the LI (called once; the result is printed for debugging)
        validationResult = createdLI.validate()
        print("LI validation result: %s" % validationResult)
        # Activating the LI should fail as it is not yet associated with a Device or Thing Type
        with pytest.raises(Exception):
            createdLI.activate()
        # Requesting differences should also fail, as there are currently none for this LI
        with pytest.raises(Exception):
            createdLI.differences()
# The expected exception was raised
# Delete the LI
del self.appClient.state.draft.logicalInterfaces[createdLI.id]
# It should be gone
        assert not self.doesLINameExist(testLIName)
# Delete the schema
del self.appClient.state.draft.schemas[createdSchema.id]
# It should be gone
        assert not self.doesSchemaNameExist(test_schema_name)
|
epl-1.0
|
ibm-watson-iot/iot-python
|
test/test_codecs_utf8.py
|
2
|
1309
|
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
import os
import testUtils
from wiotp.sdk import InvalidEventException, Utf8Codec
class NonJsonDummyPahoMessage(object):
def __init__(self, object):
self.payload = bytearray()
        try:
            self.payload.extend(object)
        except TypeError:
            # Python 3: a str cannot extend a bytearray directly, so map characters to code points
            self.payload.extend(map(ord, object))
class TestDevice(testUtils.AbstractTest):
def testFileObject(self):
cwd = os.getcwd()
fileContent = None
with open("%s/README.md" % cwd) as fileIn:
fileContent = fileIn.read()
assert fileContent is not None
encodedPayload = Utf8Codec.encode(fileContent, None)
message = Utf8Codec.decode(NonJsonDummyPahoMessage(encodedPayload))
assert message.data.__class__.__name__ in ["str", "unicode"]
assert message.data == fileContent
|
epl-1.0
|