repo_name
stringlengths 7
65
| path
stringlengths 5
185
| copies
stringlengths 1
4
| size
stringlengths 4
6
| content
stringlengths 977
990k
| license
stringclasses 14
values | hash
stringlengths 32
32
| line_mean
float64 7.18
99.4
| line_max
int64 31
999
| alpha_frac
float64 0.25
0.95
| ratio
float64 1.5
7.84
| autogenerated
bool 1
class | config_or_test
bool 2
classes | has_no_keywords
bool 2
classes | has_few_assignments
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
mozilla/normandy
|
contract-tests/v3_api/test_group_update.py
|
1
|
1210
|
import uuid
from support.assertions import assert_valid_schema
from urllib.parse import urljoin
def test_group_update(conf, requests_session, headers):
    """Contract test: a group's name can be changed via PUT to the v3 API."""
    server = conf.getoption("server")

    # Create a group with a random (collision-free) name.
    payload = {"name": str(uuid.uuid4())}
    create_resp = requests_session.post(
        urljoin(server, "/api/v3/group/"), headers=headers, data=payload
    )
    assert create_resp.status_code == 201
    assert_valid_schema(create_resp.json())
    group_id = create_resp.json()["id"]

    # The stored group should be retrievable and schema-valid.
    get_resp = requests_session.get(
        urljoin(server, "/api/v3/group/{}/".format(group_id)), headers=headers
    )
    assert get_resp.status_code == 200
    assert_valid_schema(get_resp.json())

    # Rename it via PUT and confirm the new name was persisted.
    new_payload = {"name": str(uuid.uuid4())}
    put_resp = requests_session.put(
        urljoin(server, "/api/v3/group/{}/".format(group_id)),
        headers=headers,
        data=new_payload,
    )
    assert put_resp.status_code == 200
    assert_valid_schema(put_resp.json())
    assert put_resp.json()["name"] == new_payload["name"]
|
mpl-2.0
|
305dad0d6a7a428344d25835b0331542
| 33.571429
| 96
| 0.661983
| 3.74613
| false
| false
| false
| false
|
mozilla/normandy
|
normandy/conftest.py
|
1
|
3099
|
from django.core.management import call_command
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
import pytest
import requests_mock
from graphene.test import Client as GrapheneClient
from rest_framework.test import APIClient
from normandy.schema import schema as normandy_schema
from normandy.base.tests import UserFactory
from normandy.recipes import geolocation as geolocation_module
from normandy.recipes.tests import fake_sign
@pytest.fixture
def api_client():
    """Provide a DRF API client pre-authenticated as a superuser."""
    superuser = UserFactory(is_superuser=True)
    drf_client = APIClient()
    drf_client.force_authenticate(user=superuser)
    return drf_client
@pytest.fixture
def gql_client():
    """Provide a Graphene test client bound to the Normandy schema."""
    return GrapheneClient(normandy_schema)
@pytest.fixture
def geolocation():
    """Load the GeoIP database, skipping the test when it is unavailable."""
    geolocation_module.load_geoip_database()
    if geolocation_module.geoip_reader is None:
        pytest.skip()
    return geolocation_module
@pytest.fixture
def mocked_autograph(mocker):
    """Patch the Autographer so signing goes through the fake signer."""
    autographer = mocker.patch("normandy.recipes.models.Autographer")
    autographer.return_value.sign_data.side_effect = fake_sign
    return autographer
@pytest.fixture
def mocked_remotesettings(mocker):
    """Patch the RemoteSettings integration and return the mock."""
    return mocker.patch("normandy.recipes.models.RemoteSettings")
@pytest.fixture
def rs_settings(settings):
    """Populate Django settings with test Remote Settings credentials."""
    credentials = {
        "REMOTE_SETTINGS_URL": "https://remotesettings.example.com/v1",
        "REMOTE_SETTINGS_USERNAME": "normandy",
        "REMOTE_SETTINGS_PASSWORD": "n0rm4ndy",
    }
    for name, value in credentials.items():
        setattr(settings, name, value)
    return settings
@pytest.fixture()
def migrations(transactional_db):
    """
    Return a helper object for exercising Django data migrations.
    Based on: https://gist.github.com/bennylope/82a6088c02fefdd47e18f3c04ec167af
    """

    class Migrator(object):
        def migrate(self, app, to):
            # Migrate `app` to the named migration and return the historical
            # app registry at that state.
            target = [(app, to)]
            executor = MigrationExecutor(connection)
            executor.migrate(target)
            return executor.loader.project_state(target).apps

        def reset(self):
            # Re-apply all migrations to return to the latest schema.
            call_command("migrate", no_input=True)

    return Migrator()
@pytest.fixture
def requestsmock():
    """Yield a requests_mock mocker so every HTTP request is intercepted.

    Usage::

        def test_something(requestsmock):
            requestsmock.get(
                'https://example.com/path',
                content=b'The content',
            )
            # Do stuff that involves requests.get('http://example.com/path')
    """
    with requests_mock.mock() as mocked:
        yield mocked
@pytest.fixture
def storage(settings):
    """Switch to in-memory file storage and empty it after the test."""
    settings.DEFAULT_FILE_STORAGE = "normandy.base.storage.NormandyInMemoryStorage"
    from django.core.files.storage import default_storage

    yield default_storage

    # Walk the storage tree and delete every stored file.
    # NOTE(review): Django's Storage.listdir returns (directories, files);
    # this unpacking assumes the custom in-memory storage returns
    # (files, directories) — confirm against NormandyInMemoryStorage.
    pending = ["/"]
    while pending:
        current = pending.pop()
        file_paths, subdirs = default_storage.listdir(current)
        pending.extend(subdirs)
        for file_path in file_paths:
            default_storage.delete(file_path)
|
mpl-2.0
|
6ff28baceb93bf94e973dfee7c503caa
| 26.669643
| 83
| 0.695708
| 3.868914
| false
| true
| false
| false
|
mozilla/normandy
|
normandy/recipes/migrations/0005_auto_20180503_2146.py
|
1
|
2487
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-05-03 21:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Swap the old RecipeRevision model for TmpRecipeRevision: delete the old
    # model and its inbound fields, rename the temporary model into place,
    # then rename/retarget the fields that pointed at the temporary model.

    dependencies = [("recipes", "0004_auto_20180502_2340")]

    operations = [
        # Drop references to the old revision model, then the model itself.
        migrations.RemoveField(model_name="approvalrequest", name="revision"),
        migrations.RemoveField(model_name="recipe", name="approved_revision"),
        migrations.RemoveField(model_name="recipe", name="latest_revision"),
        migrations.DeleteModel(name="RecipeRevision"),
        # Promote the temporary model to the canonical name.
        migrations.RenameModel("TmpRecipeRevision", "RecipeRevision"),
        # Re-declare the promoted model's foreign keys.
        migrations.AlterField(
            model_name="reciperevision",
            name="action",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="recipe_revisions",
                to="recipes.Action",
            ),
        ),
        migrations.AlterField(
            model_name="reciperevision",
            name="recipe",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="revisions",
                to="recipes.Recipe",
            ),
        ),
        migrations.AlterField(
            model_name="reciperevision",
            name="user",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="recipe_revisions",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        # Restore the original field names on the referencing models.
        migrations.RenameField(
            model_name="approvalrequest", old_name="tmp_revision", new_name="revision"
        ),
        migrations.RenameField(
            model_name="recipe", old_name="approved_tmp_revision", new_name="approved_revision"
        ),
        migrations.RenameField(
            model_name="recipe", old_name="latest_tmp_revision", new_name="latest_revision"
        ),
        migrations.AlterField(
            model_name="approvalrequest",
            name="revision",
            field=models.OneToOneField(
                default=None,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="approval_request",
                to="recipes.RecipeRevision",
            ),
            preserve_default=False,
        ),
    ]
|
mpl-2.0
|
84226085f1856f60fffbc7d1658a6032
| 35.043478
| 95
| 0.577805
| 4.605556
| false
| false
| false
| false
|
mozilla/normandy
|
contract-tests/v3_api/test_approval_request_close.py
|
1
|
1496
|
from support.assertions import assert_valid_schema
from support.helpers import new_recipe
from urllib.parse import urljoin
def test_approval_request_close(conf, requests_session, headers):
    """Contract test: closing an approval request deletes it."""
    server = conf.getoption("server")

    # Pick any existing action to build a recipe against.
    action_resp = requests_session.get(
        urljoin(server, "/api/v3/action/"), headers=headers
    )
    action_id = action_resp.json()["results"][0]["id"]

    # Create a recipe.
    recipe_details = new_recipe(requests_session, action_id, server, headers)

    # Request approval of its latest revision.
    approval_resp = requests_session.post(
        urljoin(
            server,
            "/api/v3/recipe_revision/{}/request_approval/".format(
                recipe_details["latest_revision_id"]
            ),
        ),
        headers=headers,
    )
    approval_id = approval_resp.json()["id"]
    assert approval_resp.status_code != 404
    assert_valid_schema(approval_resp.json())

    # Close the approval request.
    close_resp = requests_session.post(
        urljoin(server, "/api/v3/approval_request/{}/close/".format(approval_id)),
        headers=headers,
    )
    assert close_resp.status_code == 204

    # Verify that it no longer exists.
    verify_resp = requests_session.get(
        urljoin(server, "/api/v3/approval_request/{}/".format(approval_id)),
        headers=headers,
    )
    assert verify_resp.status_code == 404
|
mpl-2.0
|
b8e6270987f62beaf396fe2b9ce6e9d0
| 31.521739
| 95
| 0.637032
| 4.021505
| false
| false
| false
| false
|
mozilla/normandy
|
normandy/recipes/migrations/0014_auto_20190228_1128.py
|
1
|
2576
|
# Generated by Django 2.0.13 on 2019-02-28 11:28
import json
import sys
from urllib.parse import unquote_plus, urlparse
from django.db import migrations
def get_filename_from_url(url):
    """Return the final path segment of ``url``, percent/plus-decoded."""
    path = urlparse(url).path
    return unquote_plus(path.rsplit("/", 1)[-1])
def add_extension_id(apps, schema_editor):
    """Forward migration: link each opt-out-study recipe revision to the
    Extension row matching its addonUrl filename via ``extensionApiId``.

    Raises Exception (rolling the migration back) when any revision's
    add-on file has no matching Extension; the failures are written to
    stderr first.
    """
    Action = apps.get_model("recipes", "Action")
    RecipeRevision = apps.get_model("recipes", "RecipeRevision")
    Extension = apps.get_model("studies", "Extension")

    failures = []

    try:
        action = Action.objects.get(name="opt-out-study")
    except Action.DoesNotExist:
        return  # Do nothing since there cannot be any recipes using the opt-out-study action

    for revision in RecipeRevision.objects.filter(action_id=action.id):
        arguments = json.loads(revision.arguments_json)
        url = arguments.get("addonUrl")
        filename = get_filename_from_url(url)
        try:
            # Fixed: the lookup f-string had a corrupted placeholder and never
            # used the computed filename; match on the decoded filename.
            extension = Extension.objects.get(xpi=f"extensions/{filename}")
        except Extension.DoesNotExist:
            failures.append(
                {
                    "filename": filename,
                    "addon_url": arguments.get("addonUrl"),
                    "revision_id": revision.id,
                    "recipe_id": revision.recipe.id,
                }
            )
        else:
            arguments["extensionApiId"] = extension.id
            revision.arguments_json = json.dumps(arguments)
            revision.save()

    if failures:
        for failure in failures:
            sys.stderr.write(f"{failure}\n")
        raise Exception("There were failures in this migration.")
def remove_extension_id(apps, schema_editor):
    """Reverse migration: strip ``extensionApiId`` from opt-out-study
    recipe revision arguments."""
    Action = apps.get_model("recipes", "Action")
    RecipeRevision = apps.get_model("recipes", "RecipeRevision")

    try:
        action = Action.objects.get(name="opt-out-study")
    except Action.DoesNotExist:
        # Nothing to undo: no recipes can reference the opt-out-study action.
        return

    for revision in RecipeRevision.objects.filter(action_id=action.id):
        arguments = json.loads(revision.arguments_json)
        if "extensionApiId" in arguments:
            arguments.pop("extensionApiId")
            revision.arguments_json = json.dumps(arguments)
            revision.save()
class Migration(migrations.Migration):
    # Data migration: attach extensionApiId to opt-out-study recipe arguments
    # (reversible via remove_extension_id).

    dependencies = [
        ("recipes", "0013_auto_20181018_2049"),
        ("studies", "0006_extension_hash_algorithm"),
    ]

    operations = [migrations.RunPython(add_extension_id, remove_extension_id)]
|
mpl-2.0
|
0a2d391270318e7b0286c7f6cc490d0f
| 30.802469
| 93
| 0.634705
| 4.088889
| false
| false
| false
| false
|
developmentseed/landsat-util
|
setup.py
|
1
|
1158
|
#!/usr/bin/env python

# Landsat Util
# License: CC0 1.0 Universal

try:
    from setuptools import setup
    setup_kwargs = {'entry_points': {'console_scripts': ['landsat=landsat.landsat:__main__']}}
except ImportError:
    from distutils.core import setup
    setup_kwargs = {'scripts': ['bin/landsat']}

from landsat import __version__


def readme():
    """Return the README contents for use as the long description."""
    with open('README.rst') as f:
        return f.read()


# Fixed: filter with .strip() so blank lines ("\n" is truthy) don't become
# empty requirement entries.
with open('requirements.txt') as fid:
    INSTALL_REQUIRES = [l.strip() for l in fid.readlines() if l.strip()]

with open('requirements-dev.txt') as fid:
    TEST_REQUIRES = [l.strip() for l in fid.readlines() if l.strip()]

setup(
    name='landsat-util',
    version=__version__,
    description='A utility to search, download and process Landsat 8' +
                ' satellite imagery',
    long_description=readme(),
    author='Development Seed',
    author_email='info@developmentseed.org',
    url='https://github.com/developmentseed/landsat-util',
    packages=['landsat'],
    include_package_data=True,
    license='CC0',  # fixed typo: was 'CCO' (letter O) for the CC0 license
    platforms='Posix; MacOS X; Windows',
    install_requires=INSTALL_REQUIRES,
    tests_require=TEST_REQUIRES,
    **setup_kwargs
)
|
cc0-1.0
|
21e308a331aa8210e6c3216ad6ef8e17
| 25.930233
| 93
| 0.66753
| 3.436202
| false
| true
| false
| false
|
developmentseed/landsat-util
|
landsat/mixins.py
|
3
|
2950
|
# Pansharpened Image Process using Rasterio
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
import sys
import subprocess
from termcolor import colored
class VerbosityMixin(object):
    """Mixin providing verbosity-aware, colorized stdout helpers."""

    verbose = False

    def output(self, value, normal=False, color=None, error=False,
               arrow=False, indent=None):
        """Print ``value`` subject to the verbosity rules.

        A message is shown when ``normal`` is true or the instance's
        ``verbose`` flag is set; ``error`` forces red output (and skips the
        arrow prefix).

        :param value: the message to be printed (String)
        :param normal: always print, regardless of verbosity (boolean)
        :param color: message color: 'red', 'green' or 'blue' (String)
        :param error: if true, the message appears in red (Boolean)
        :param arrow: if true, an arrow appears before the message (Boolean)
        :param indent: indents the message by the given count (int)
        :returns: the printed string, or None when suppressed
        """
        visible = normal or self.verbose
        if error and value and visible:
            return self._print(value, color='red', indent=indent)
        if visible:
            return self._print(value, color, arrow, indent)
        return

    def subprocess(self, argv):
        """Execute subprocess commands with proper output handling.

        This is no longer used in landsat-util.

        :param argv: a list of subprocess arguments (List)
        :returns: void
        """
        if self.verbose:
            proc = subprocess.Popen(argv, stderr=subprocess.PIPE)
        else:
            # Swallow stdout when not verbose; stderr is still surfaced.
            proc = subprocess.Popen(argv, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
        self.output(proc.stderr.read(), error=True)
        return

    def exit(self, message):
        """Print ``message`` in green and terminate the process.

        :param message: the message to be printed before exiting (String)
        """
        self.output(message, normal=True, color="green")
        sys.exit()

    def _print(self, msg, color=None, arrow=False, indent=None):
        """Apply color/arrow/indent decorations, print and return ``msg``."""
        if color:
            msg = colored(msg, color)
        if arrow:
            msg = colored('===> ', 'blue') + msg
        if indent:
            msg = (' ' * indent) + msg
        print(msg)
        return msg
|
cc0-1.0
|
a023114dc73545a6f35664a0aba26973
| 24.652174
| 104
| 0.554237
| 4.689984
| false
| false
| false
| false
|
rmmh/skybot
|
core/irc.py
|
3
|
10652
|
from __future__ import print_function
from builtins import map
from builtins import object
import re
import socket
import time
import _thread
import queue
from ssl import wrap_socket, CERT_NONE, CERT_REQUIRED, SSLError
DEFAULT_NAME = "skybot"
DEFAULT_REALNAME = "Python bot - http://github.com/rmmh/skybot"
DEFAULT_NICKSERV_NAME = "nickserv"
DEFAULT_NICKSERV_COMMAND = "IDENTIFY %s"
def decode(txt):
    """Decode IRC line bytes, trying common encodings in order and falling
    back to UTF-8 with undecodable bytes dropped."""
    for encoding in ("utf-8", "iso-8859-1", "shift_jis", "cp1252"):
        try:
            return txt.decode(encoding)
        except UnicodeDecodeError:
            pass
    return txt.decode("utf-8", "ignore")
def censor(text, censored_strings=None):
    """Collapse newlines to spaces and replace any censored strings with
    the literal marker ``[censored]``."""
    text = re.sub("[\n\r]+", " ", text)
    if not censored_strings:
        return text
    escaped = [re.escape(word) for word in censored_strings]
    pattern = "(%s)" % "|".join(escaped)
    return re.sub(pattern, "[censored]", text)
class crlf_tcp(object):
    """Handles tcp connections that consist of utf-8 lines ending with crlf."""

    def __init__(self, host, port, timeout=300):
        self.ibuffer = b""  # bytes received, not yet split into lines
        self.obuffer = b""  # bytes waiting to be sent
        self.oqueue = queue.Queue()  # lines to be sent out
        self.iqueue = queue.Queue()  # lines that were received
        self.socket = self.create_socket()
        self.host = host
        self.port = port
        self.timeout = timeout

    def create_socket(self):
        # Fixed: the second argument is the socket *type*. The original passed
        # socket.TCP_NODELAY (a socket option), which only worked because its
        # value happens to equal SOCK_STREAM (1) on common platforms.
        return socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def run(self):
        """Connect (retrying every 60s on timeout) and start the reader and
        writer threads."""
        while True:
            try:
                self.socket.connect((self.host, self.port))
            except socket.timeout:
                print("timed out connecting to %s:%s" % (self.host, self.port))
                time.sleep(60)
            else:
                break
        _thread.start_new_thread(self.recv_loop, ())
        _thread.start_new_thread(self.send_loop, ())

    def recv_from_socket(self, nbytes):
        return self.socket.recv(nbytes)

    def get_timeout_exception_type(self):
        return socket.timeout

    def handle_receive_exception(self, error, last_timestamp):
        # Give up (signalling the consumer with StopIteration) once the
        # connection has been silent longer than the timeout.
        if time.time() - last_timestamp > self.timeout:
            self.iqueue.put(StopIteration)
            self.socket.close()
            return True
        return False

    def recv_loop(self):
        """Read bytes, split complete CRLF lines and queue them decoded."""
        last_timestamp = time.time()
        while True:
            try:
                data = self.recv_from_socket(4096)
                self.ibuffer += data
                if data:
                    last_timestamp = time.time()
                else:
                    # Empty read: peer closed or the link is dead.
                    if time.time() - last_timestamp > self.timeout:
                        self.iqueue.put(StopIteration)
                        self.socket.close()
                        return
                    time.sleep(1)
            except (self.get_timeout_exception_type(), socket.error) as e:
                if self.handle_receive_exception(e, last_timestamp):
                    return
                continue

            while b"\r\n" in self.ibuffer:
                line, self.ibuffer = self.ibuffer.split(b"\r\n", 1)
                self.iqueue.put(decode(line))

    def send_loop(self):
        """Drain the outgoing queue, truncating each message to a single
        500-character line per IRC limits."""
        while True:
            line = self.oqueue.get().splitlines()[0][:500]
            print(">>> %s" % line)
            self.obuffer += line.encode("utf-8", "replace") + b"\r\n"
            while self.obuffer:
                sent = self.socket.send(self.obuffer)
                self.obuffer = self.obuffer[sent:]
class crlf_ssl_tcp(crlf_tcp):
    """Handles SSL TCP connections that consist of utf-8 lines ending with
    CRLF."""

    def __init__(self, host, port, ignore_cert_errors, timeout=300):
        # Record the cert policy before the base class creates the socket,
        # since create_socket() reads it.
        self.ignore_cert_errors = ignore_cert_errors
        crlf_tcp.__init__(self, host, port, timeout)

    def create_socket(self):
        # NOTE(review): ssl.wrap_socket was removed in Python 3.12; running
        # there requires ssl.SSLContext.wrap_socket — confirm target version.
        if self.ignore_cert_errors:
            cert_policy = CERT_NONE
        else:
            cert_policy = CERT_REQUIRED
        return wrap_socket(
            crlf_tcp.create_socket(self),
            server_side=False,
            cert_reqs=cert_policy,
        )

    def recv_from_socket(self, nbytes):
        return self.socket.read(nbytes)

    def get_timeout_exception_type(self):
        return SSLError

    def handle_receive_exception(self, error, last_timestamp):
        return crlf_tcp.handle_receive_exception(self, error, last_timestamp)
def zip_channels(channels):
    """Collapse a channel list into JOIN arguments: a comma-joined channel
    string plus, when any channel carries a key, a comma-joined key string."""
    # Keyed entries ("#chan key") must sort first so keys line up positionally.
    channels.sort(key=lambda entry: " " not in entry)
    names = []
    keys = []
    for entry in channels:
        if " " in entry:
            name, key = entry.split(" ")
            names.append(name)
            keys.append(key)
        else:
            names.append(entry)
    joined_names = ",".join(names)
    return [joined_names, ",".join(keys)] if keys else [joined_names]
def test_zip_channels():
    """Sanity-check zip_channels: keyed channels come first and keys are
    emitted as a second comma-joined argument only when present."""
    assert zip_channels(["#a", "#b c", "#d"]) == ["#b,#a,#d", "c"]
    assert zip_channels(["#a", "#b"]) == ["#a,#b"]
class IRC(object):
    """Handles the IRC protocol."""
    # see the docs/ folder for more information on the protocol

    # Pre-compiled line parsers, stored as bound match/findall callables.
    IRC_PREFIX_REM = re.compile(r"(.*?) (.*?) (.*)").match
    IRC_NOPROFEIX_REM = re.compile(r"()(.*?) (.*)").match
    IRC_NETMASK_REM = re.compile(r":?([^!@]*)!?([^@]*)@?(.*)").match
    IRC_PARAM_REF = re.compile(r"(?:^|(?<= ))(:.*|[^ ]+)").findall

    def __init__(self, conf):
        self.conn = None
        self.nick = DEFAULT_NAME
        self.user = DEFAULT_NAME
        self.realname = DEFAULT_REALNAME
        self.user_mode = None
        self.server_host = None
        self.server_port = 6667
        self.server_password = None
        self.nickserv_password = None
        self.nickserv_name = DEFAULT_NICKSERV_NAME
        self.nickserv_command = DEFAULT_NICKSERV_COMMAND
        self.channels = []
        self.admins = []
        self.censored_strings = []
        self.out = queue.Queue()  # responses from the server are placed here
        # out entry format: [rawline, prefix, command, params,
        #                    nick, user, host, paramlist, msg]
        self.set_conf(conf)
        self.connect()
        _thread.start_new_thread(self.parse_loop, ())

    def set_conf(self, conf):
        """Apply a config dict; re-joins channels on a live connection."""
        self.nick = conf.get("nick", DEFAULT_NAME)
        self.user = conf.get("user", DEFAULT_NAME)
        self.realname = conf.get("realname", DEFAULT_REALNAME)
        self.user_mode = conf.get("mode", None)
        self.server_host = conf["server"]
        self.server_port = conf.get("port", 6667)
        self.server_password = conf.get("server_password", None)
        self.nickserv_password = conf.get("nickserv_password", None)
        self.nickserv_name = conf.get("nickserv_name", DEFAULT_NICKSERV_NAME)
        self.nickserv_command = conf.get("nickserv_command", DEFAULT_NICKSERV_COMMAND)
        self.channels = conf.get("channels", [])
        self.admins = conf.get("admins", [])
        self.censored_strings = conf.get("censored_strings", [])
        if self.conn is not None:
            self.join_channels()

    def create_connection(self):
        return crlf_tcp(self.server_host, self.server_port)

    def connect(self):
        """Open a fresh connection and register with the server."""
        self.conn = self.create_connection()
        _thread.start_new_thread(self.conn.run, ())
        self.cmd("NICK", [self.nick])
        self.cmd("USER", [self.user, "3", "*", self.realname])
        if self.server_password:
            # NOTE(review): RFC 2812 says PASS should precede NICK/USER;
            # this sends it afterwards — confirm against target servers.
            self.cmd("PASS", [self.server_password])

    def parse_loop(self):
        """Consume raw lines, parse them and publish parsed tuples to out."""
        while True:
            raw = self.conn.iqueue.get()
            if raw == StopIteration:
                # Connection died: reconnect and keep parsing.
                self.connect()
                continue
            if raw.startswith(":"):  # line carries a prefix
                prefix, command, params = self.IRC_PREFIX_REM(raw).groups()
            else:
                prefix, command, params = self.IRC_NOPROFEIX_REM(raw).groups()
            nick, user, host = self.IRC_NETMASK_REM(prefix).groups()
            paramlist = self.IRC_PARAM_REF(params)
            lastparam = ""
            if paramlist:
                # Strip the ':' marker from the trailing parameter.
                if paramlist[-1].startswith(":"):
                    paramlist[-1] = paramlist[-1][1:]
                lastparam = paramlist[-1]
            self.out.put(
                [raw, prefix, command, params, nick, user, host, paramlist, lastparam]
            )
            if command == "PING":
                self.cmd("PONG", paramlist)

    def join(self, channel):
        self.cmd("JOIN", channel.split(" "))  # [chan, password]

    def join_channels(self):
        if self.channels:
            # TODO: send multiple join commands for large channel lists
            self.cmd("JOIN", zip_channels(self.channels))

    def msg(self, target, text):
        self.cmd("PRIVMSG", [target, text])

    def cmd(self, command, params=None):
        """Send a command; the last param is marked as trailing and every
        param is censored on the way out."""
        if params:
            params[-1] = ":" + params[-1]
            cleaned = [censor(p, self.censored_strings) for p in params]
            self.send(command + " " + " ".join(cleaned))
        else:
            self.send(command)

    def send(self, str):
        self.conn.oqueue.put(str)
class FakeIRC(IRC):
    """IRC subclass that replays a logged session from a file instead of
    connecting to a network; outgoing commands are discarded."""

    def __init__(self, conf):
        self.set_conf(conf)

        self.out = queue.Queue()  # responses from the server are placed here

        # Fixed: the original referenced an undefined name ``fn``; the log
        # filename now comes from the config.
        self.f = open(conf["fn"], "rb")

        _thread.start_new_thread(self.parse_loop, ())

    def parse_loop(self):
        while True:
            # Skip the 9-byte timestamp prefix each logged line carries.
            msg = decode(self.f.readline()[9:])

            if msg == "":
                print("!!!!DONE READING FILE!!!!")
                return

            # Fixed: the original called undefined module-level parsers
            # (irc_prefix_rem etc.); use the compiled patterns on IRC.
            if msg.startswith(":"):  # has a prefix
                prefix, command, params = self.IRC_PREFIX_REM(msg).groups()
            else:
                prefix, command, params = self.IRC_NOPROFEIX_REM(msg).groups()

            nick, user, host = self.IRC_NETMASK_REM(prefix).groups()
            paramlist = self.IRC_PARAM_REF(params)
            lastparam = ""
            if paramlist:
                if paramlist[-1].startswith(":"):
                    paramlist[-1] = paramlist[-1][1:]
                lastparam = paramlist[-1]
            self.out.put(
                [msg, prefix, command, params, nick, user, host, paramlist, lastparam]
            )
            if command == "PING":
                self.cmd("PONG", [params])

    def cmd(self, command, params=None):
        # Replay mode: swallow outgoing commands.
        pass
class SSLIRC(IRC):
    """IRC over SSL: default port 6697 and optional certificate checking."""

    def __init__(self, conf):
        super(SSLIRC, self).__init__(conf=conf)
        # Fixed: the original reset server_port/server_ignore_cert here,
        # clobbering the values set_conf() derived from the config — and it
        # did so *after* the base __init__ had already connected. set_conf()
        # below owns those attributes.

    def set_conf(self, conf):
        super(SSLIRC, self).set_conf(conf)
        self.server_port = conf.get("port", 6697)
        self.server_ignore_cert = conf.get("ignore_cert", False)

    def create_connection(self):
        return crlf_ssl_tcp(self.server_host, self.server_port, self.server_ignore_cert)
|
unlicense
|
8a5ef5e7c64db739c054b8d67d63de1f
| 30.329412
| 88
| 0.556515
| 3.705043
| false
| false
| false
| false
|
rmmh/skybot
|
plugins/util/http.py
|
3
|
5942
|
from future.standard_library import hooks
from lxml import etree, html
import binascii
import collections
import hmac
import json
import random
import time
from hashlib import sha1
from builtins import str
from builtins import range
try:
from http.cookiejar import CookieJar
except:
from future.backports.http.cookiejar import CookieJar
with hooks():
import urllib.request, urllib.parse, urllib.error
from urllib.parse import (
quote,
unquote,
urlencode,
urlparse,
parse_qsl,
quote_plus as _quote_plus,
)
from urllib.error import HTTPError, URLError
ua_skybot = "Skybot/1.0 https://github.com/rmmh/skybot"
ua_firefox = (
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) "
"Gecko/20070725 Firefox/2.0.0.6"
)
ua_internetexplorer = "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)"
def get_cookie_jar():
    """Return the process-wide CookieJar, creating it on first use."""
    try:
        return get_cookie_jar.memo
    except AttributeError:
        # First call: memoize the jar on the function object itself.
        get_cookie_jar.memo = CookieJar()
        return get_cookie_jar.memo
def clear_expired_cookies():
    """Drop expired cookies from the shared cookie jar."""
    jar = get_cookie_jar()
    jar.clear_expired_cookies()
def get(*args, **kwargs):
    """Fetch a URL via open() and return the body decoded as UTF-8."""
    return open(*args, **kwargs).read().decode("utf-8")


def get_html(*args, **kwargs):
    """Fetch a URL via open() and parse the body as an lxml HTML tree."""
    body = open(*args, **kwargs).read()
    return html.fromstring(body)


def get_xml(*args, **kwargs):
    """Fetch a URL via open() and parse the body as an lxml XML tree."""
    body = open(*args, **kwargs).read()
    return etree.fromstring(body)


def get_json(*args, **kwargs):
    """Fetch a URL via open() and parse the body as JSON."""
    body = open(*args, **kwargs).read()
    return json.loads(body)
def open(
    url,
    query_params=None,
    post_data=None,
    json_data=None,
    get_method=None,
    cookies=False,
    oauth=False,
    oauth_keys=None,
    headers=None,
    **kwargs
):
    """Open ``url`` with urllib, supporting query params, form/JSON bodies,
    cookies, custom headers and OAuth 1.0 request signing.

    Note: this intentionally shadows the builtin ``open``; the get_*
    helpers in this module route through it. Extra keyword arguments are
    folded into the query string.
    """
    # Fixed: the Mapping ABC alias on the collections package was removed
    # in Python 3.10; import it from collections.abc.
    from collections.abc import Mapping

    if query_params is None:
        query_params = {}

    query_params.update(kwargs)

    url = prepare_url(url, query_params)

    if post_data and isinstance(post_data, Mapping):
        post_data = urllib.parse.urlencode(post_data)
        post_data = post_data.encode("UTF-8")

    if json_data and isinstance(json_data, dict):
        post_data = json.dumps(json_data).encode("utf-8")

    request = urllib.request.Request(url, post_data)

    if json_data:
        request.add_header("Content-Type", "application/json")

    if get_method is not None:
        request.get_method = lambda: get_method

    if headers is not None:
        for header_key, header_value in headers.items():
            request.add_header(header_key, header_value)

    if "User-Agent" not in request.headers:
        request.add_header("User-Agent", ua_skybot)

    if oauth:
        nonce = oauth_nonce()
        timestamp = oauth_timestamp()
        api_url, req_data = url.split("?")
        unsigned_request = oauth_unsigned_request(
            nonce, timestamp, req_data, oauth_keys["consumer"], oauth_keys["access"]
        )
        signature = oauth_sign_request(
            "GET",
            api_url,
            req_data,
            unsigned_request,
            oauth_keys["consumer_secret"],
            oauth_keys["access_secret"],
        )
        header = oauth_build_header(
            nonce, signature, timestamp, oauth_keys["consumer"], oauth_keys["access"]
        )
        request.add_header("Authorization", header)

    if cookies:
        opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(get_cookie_jar())
        )
    else:
        opener = urllib.request.build_opener()

    return opener.open(request)
def prepare_url(url, queries):
    """Merge ``queries`` into the URL's existing query string and return the
    rebuilt URL; when ``queries`` is empty the URL is returned untouched."""
    if not queries:
        return url
    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(str(url))
    merged = dict(urllib.parse.parse_qsl(query))
    merged.update(queries)
    encoded = urllib.parse.urlencode(
        {to_utf8(key): to_utf8(value) for key, value in merged.items()}
    )
    return urllib.parse.urlunsplit((scheme, netloc, path, encoded, fragment))


def to_utf8(s):
    """Encode str to UTF-8 bytes (errors ignored); stringify anything else."""
    if isinstance(s, str):
        return s.encode("utf8", "ignore")
    return str(s)
def quote_plus(s):
    """URL-quote ``s`` (spaces become '+') after normalizing via to_utf8."""
    return _quote_plus(to_utf8(s))
def oauth_nonce():
    """Return an 8-character random digit string for OAuth nonces."""
    return "".join(str(random.randint(0, 9)) for _ in range(8))
def oauth_timestamp():
    """Return the current Unix time as a whole-seconds string."""
    return "%d" % time.time()
def oauth_unsigned_request(nonce, timestamp, req, consumer, token):
    """Build the percent-encoded, key-sorted parameter string to be signed."""
    params = {
        "oauth_consumer_key": consumer,
        "oauth_nonce": nonce,
        "oauth_signature_method": "HMAC-SHA1",
        "oauth_timestamp": timestamp,
        "oauth_token": token,
        "oauth_version": "1.0",
    }
    params.update(urllib.parse.parse_qsl(req))

    # TODO: Remove this when Python 2 is no longer supported.
    # Some values are real str, others are the future-lib str wrapper;
    # normalize everything so the sort is consistent.
    normalized = [(str(k), str(v)) for k, v in params.items()]
    normalized.sort(key=lambda pair: pair[0])
    return quote(urllib.parse.urlencode(normalized))
def oauth_build_header(nonce, signature, timestamp, consumer, token):
    """Build the OAuth Authorization header value.

    Note: this reproduces the original output exactly, including its
    trailing comma.
    """
    fields = {
        "oauth_consumer_key": consumer,
        "oauth_nonce": nonce,
        "oauth_signature": signature,
        "oauth_signature_method": "HMAC-SHA1",
        "oauth_timestamp": timestamp,
        "oauth_token": token,
        "oauth_version": "1.0",
    }
    parts = [
        '%s="%s", ' % (name, fields[name])
        for name in sorted(fields, key=lambda key: key[0])
    ]
    # [:-1] drops only the final space, leaving the trailing comma intact.
    return ("OAuth " + "".join(parts))[:-1]
def oauth_sign_request(
    method, url, params, unsigned_request, consumer_secret, token_secret
):
    """Compute the percent-encoded base64 HMAC-SHA1 OAuth signature.

    ``params`` is accepted for interface compatibility but unused (the
    request parameters arrive pre-encoded in ``unsigned_request``).
    """
    signing_key = (consumer_secret + "&" + token_secret).encode("utf-8", "replace")
    base_string = "&".join([method, quote(url, ""), unsigned_request])
    digest = hmac.new(
        signing_key, base_string.encode("utf-8", "replace"), sha1
    ).digest()
    # b2a_base64 appends a newline; strip it before quoting.
    return quote(binascii.b2a_base64(digest)[:-1])
def unescape(s):
    """Return the plain-text content of HTML string ``s``; whitespace-only
    input is returned unchanged."""
    stripped = s.strip()
    if not stripped:
        return s
    return html.fromstring(s).text_content()
|
unlicense
|
8d528fae968f41036b202e03cfa36487
| 23.861925
| 87
| 0.619152
| 3.560216
| false
| false
| false
| false
|
pytube/pytube
|
pytube/query.py
|
1
|
12622
|
"""This module provides a query interface for media streams and captions."""
from collections.abc import Mapping, Sequence
from typing import Callable, List, Optional, Union
from pytube import Caption, Stream
from pytube.helpers import deprecated
class StreamQuery(Sequence):
"""Interface for querying the available media streams."""
def __init__(self, fmt_streams):
"""Construct a :class:`StreamQuery <StreamQuery>`.
param list fmt_streams:
list of :class:`Stream <Stream>` instances.
"""
self.fmt_streams = fmt_streams
self.itag_index = {int(s.itag): s for s in fmt_streams}
def filter(
self,
fps=None,
res=None,
resolution=None,
mime_type=None,
type=None,
subtype=None,
file_extension=None,
abr=None,
bitrate=None,
video_codec=None,
audio_codec=None,
only_audio=None,
only_video=None,
progressive=None,
adaptive=None,
is_dash=None,
custom_filter_functions=None,
):
"""Apply the given filtering criterion.
:param fps:
(optional) The frames per second.
:type fps:
int or None
:param resolution:
(optional) Alias to ``res``.
:type res:
str or None
:param res:
(optional) The video resolution.
:type resolution:
str or None
:param mime_type:
(optional) Two-part identifier for file formats and format contents
composed of a "type", a "subtype".
:type mime_type:
str or None
:param type:
(optional) Type part of the ``mime_type`` (e.g.: audio, video).
:type type:
str or None
:param subtype:
(optional) Sub-type part of the ``mime_type`` (e.g.: mp4, mov).
:type subtype:
str or None
:param file_extension:
(optional) Alias to ``sub_type``.
:type file_extension:
str or None
:param abr:
(optional) Average bitrate (ABR) refers to the average amount of
data transferred per unit of time (e.g.: 64kbps, 192kbps).
:type abr:
str or None
:param bitrate:
(optional) Alias to ``abr``.
:type bitrate:
str or None
:param video_codec:
(optional) Video compression format.
:type video_codec:
str or None
:param audio_codec:
(optional) Audio compression format.
:type audio_codec:
str or None
:param bool progressive:
Excludes adaptive streams (one file contains both audio and video
tracks).
:param bool adaptive:
Excludes progressive streams (audio and video are on separate
tracks).
:param bool is_dash:
Include/exclude dash streams.
:param bool only_audio:
Excludes streams with video tracks.
:param bool only_video:
Excludes streams with audio tracks.
:param custom_filter_functions:
(optional) Interface for defining complex filters without
subclassing.
:type custom_filter_functions:
list or None
"""
filters = []
if res or resolution:
filters.append(lambda s: s.resolution == (res or resolution))
if fps:
filters.append(lambda s: s.fps == fps)
if mime_type:
filters.append(lambda s: s.mime_type == mime_type)
if type:
filters.append(lambda s: s.type == type)
if subtype or file_extension:
filters.append(lambda s: s.subtype == (subtype or file_extension))
if abr or bitrate:
filters.append(lambda s: s.abr == (abr or bitrate))
if video_codec:
filters.append(lambda s: s.video_codec == video_codec)
if audio_codec:
filters.append(lambda s: s.audio_codec == audio_codec)
if only_audio:
filters.append(
lambda s: (
s.includes_audio_track and not s.includes_video_track
),
)
if only_video:
filters.append(
lambda s: (
s.includes_video_track and not s.includes_audio_track
),
)
if progressive:
filters.append(lambda s: s.is_progressive)
if adaptive:
filters.append(lambda s: s.is_adaptive)
if custom_filter_functions:
filters.extend(custom_filter_functions)
if is_dash is not None:
filters.append(lambda s: s.is_dash == is_dash)
return self._filter(filters)
def _filter(self, filters: List[Callable]) -> "StreamQuery":
fmt_streams = self.fmt_streams
for filter_lambda in filters:
fmt_streams = filter(filter_lambda, fmt_streams)
return StreamQuery(list(fmt_streams))
def order_by(self, attribute_name: str) -> "StreamQuery":
    """Apply a sort order. Filters out streams that do not have the attribute.

    :param str attribute_name:
        The name of the attribute to sort by.
    """
    candidates = [
        stream
        for stream in self.fmt_streams
        if getattr(stream, attribute_name) is not None
    ]
    # String attributes such as "720p" or "128kbps" sort more usefully by
    # their embedded number, so attempt a numeric sort first.
    if candidates and isinstance(getattr(candidates[0], attribute_name), str):
        def numeric_key(stream):
            text = getattr(stream, attribute_name)
            return int("".join(ch for ch in text if ch.isdigit()))

        try:
            return StreamQuery(sorted(candidates, key=numeric_key))
        except ValueError:
            # At least one value had no digits; fall back to a plain sort.
            pass
    return StreamQuery(
        sorted(candidates, key=lambda stream: getattr(stream, attribute_name))
    )
def desc(self) -> "StreamQuery":
    """Sort streams in descending order.

    :rtype: :class:`StreamQuery <StreamQuery>`
    """
    return StreamQuery(list(reversed(self.fmt_streams)))
def asc(self) -> "StreamQuery":
    """Sort streams in ascending order.

    The backing list is already in ascending order, so the query is
    returned unchanged.

    :rtype: :class:`StreamQuery <StreamQuery>`
    """
    return self
def get_by_itag(self, itag: int) -> Optional[Stream]:
    """Get the corresponding :class:`Stream <Stream>` for a given itag.

    :param int itag:
        YouTube format identifier code (int or numeric string).
    :rtype: :class:`Stream <Stream>` or None
    :returns:
        The :class:`Stream <Stream>` matching the given itag or None if
        not found.
    """
    normalized = int(itag)
    return self.itag_index.get(normalized)
def get_by_resolution(self, resolution: str) -> Optional[Stream]:
    """Get the corresponding :class:`Stream <Stream>` for a given resolution.

    Stream must be a progressive mp4.

    :param str resolution:
        Video resolution i.e. "720p", "480p", "360p", "240p", "144p"
    :rtype: :class:`Stream <Stream>` or None
    :returns:
        The matching progressive mp4 :class:`Stream <Stream>`, or None if
        not found.
    """
    matches = self.filter(
        progressive=True, subtype="mp4", resolution=resolution
    )
    return matches.first()
def get_lowest_resolution(self) -> Optional[Stream]:
    """Get lowest resolution stream that is a progressive mp4.

    :rtype: :class:`Stream <Stream>` or None
    :returns:
        The lowest-resolution progressive mp4 :class:`Stream <Stream>`,
        or None if not found.
    """
    progressive_mp4 = self.filter(progressive=True, subtype="mp4")
    return progressive_mp4.order_by("resolution").first()
def get_highest_resolution(self) -> Optional[Stream]:
    """Get highest resolution stream that is a progressive video.

    :rtype: :class:`Stream <Stream>` or None
    :returns:
        The highest-resolution progressive :class:`Stream <Stream>`, or
        None if not found.
    """
    ordered = self.filter(progressive=True).order_by("resolution")
    return ordered.last()
def get_audio_only(self, subtype: str = "mp4") -> Optional[Stream]:
    """Get highest bitrate audio stream for given codec (defaults to mp4)

    :param str subtype:
        Audio subtype, defaults to mp4
    :rtype: :class:`Stream <Stream>` or None
    :returns:
        The highest-bitrate audio-only :class:`Stream <Stream>`, or None
        if not found.
    """
    audio_streams = self.filter(only_audio=True, subtype=subtype)
    return audio_streams.order_by("abr").last()
def otf(self, is_otf: bool = False) -> "StreamQuery":
    """Filter stream by OTF, useful if some streams have 404 URLs

    :param bool is_otf: Set to False to retrieve only non-OTF streams
    :rtype: :class:`StreamQuery <StreamQuery>`
    :returns: A StreamQuery object with otf filtered streams
    """
    def matches(stream):
        return stream.is_otf == is_otf

    return self._filter([matches])
def first(self) -> Optional[Stream]:
    """Get the first :class:`Stream <Stream>` in the results.

    :rtype: :class:`Stream <Stream>` or None
    :returns:
        the first result of this query or None if the result doesn't
        contain any streams.
    """
    if not self.fmt_streams:
        return None
    return self.fmt_streams[0]
def last(self) -> "Optional[Stream]":
    """Get the last :class:`Stream <Stream>` in the results.

    :rtype: :class:`Stream <Stream>` or None
    :returns:
        Return the last result of this query or None if the result
        doesn't contain any streams.
    """
    try:
        return self.fmt_streams[-1]
    except IndexError:
        # Mirror first(): make the documented empty-query result explicit
        # instead of falling through with a bare `pass`.
        return None
@deprecated("Get the size of this list directly using len()")
def count(self, value: Optional[str] = None) -> int:  # pragma: no cover
    """Get the count of items in the list.

    With a truthy *value*, counts occurrences of that value; otherwise
    returns the total number of streams.

    :rtype: int
    """
    return self.fmt_streams.count(value) if value else len(self)
@deprecated("This object can be treated as a list, all() is useless")
def all(self) -> List[Stream]:  # pragma: no cover
    """Get all the results represented by this query as a list.

    :rtype: list
    """
    return self.fmt_streams
def __getitem__(self, i: Union[slice, int]):
return self.fmt_streams[i]
def __len__(self) -> int:
return len(self.fmt_streams)
def __repr__(self) -> str:
return f"{self.fmt_streams}"
class CaptionQuery(Mapping):
    """Interface for querying the available captions."""

    def __init__(self, captions: List[Caption]):
        """Build the language-code lookup table.

        :param list captions:
            list of :class:`Caption <Caption>` instances.
        """
        self.lang_code_index = {caption.code: caption for caption in captions}

    @deprecated(
        "This object can be treated as a dictionary, i.e. captions['en']"
    )
    def get_by_language_code(
        self, lang_code: str
    ) -> Optional[Caption]:  # pragma: no cover
        """Get the :class:`Caption <Caption>` for a given ``lang_code``.

        :param str lang_code:
            The code that identifies the caption language.
        :rtype: :class:`Caption <Caption>` or None
        :returns:
            The matching :class:`Caption <Caption>`, or None if the
            language code is unknown.
        """
        return self.lang_code_index.get(lang_code)

    @deprecated("This object can be treated as a dictionary")
    def all(self) -> List[Caption]:  # pragma: no cover
        """Get all the results represented by this query as a list.

        :rtype: list
        """
        return [caption for caption in self.lang_code_index.values()]

    def __getitem__(self, i: str):
        return self.lang_code_index[i]

    def __len__(self) -> int:
        return len(self.lang_code_index)

    def __iter__(self):
        return iter(self.lang_code_index.values())

    def __repr__(self) -> str:
        return repr(self.lang_code_index)
|
unlicense
|
4cd3b1805a5fb6ff5a344f4ebe671490
| 28.980998
| 81
| 0.552131
| 4.29612
| false
| false
| false
| false
|
pytube/pytube
|
pytube/innertube.py
|
1
|
11658
|
"""This module is designed to interact with the innertube API.
This module is NOT intended to be used directly by end users, as each of the
interfaces returns raw results. These should instead be parsed to extract
the useful information for the end user.
"""
# Native python imports
import json
import os
import pathlib
import time
from urllib import parse
# Local imports
from pytube import request
# YouTube on TV client secrets
# OAuth client credentials used by the device-code ("YouTube on TV")
# authorization flow in InnerTube.fetch_bearer_token/refresh_bearer_token.
_client_id = '861556708454-d6dlm3lh05idd8npek18k6be8ba3oc68.apps.googleusercontent.com'
_client_secret = 'SboVhoG9s0rNafixCSGGKXAT'

# Extracted API keys -- unclear what these are linked to.
_api_keys = [
    'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8',
    'AIzaSyCtkvNIR1HCEwzsqK6JuE6KqpyjusIRI30',
    'AIzaSyA8eiZmM1FaDVjRy-df2KTyQ_vz_yYM39w',
    'AIzaSyC8UYZpvA2eknNex0Pjid0_eTLJoDu6los',
    'AIzaSyCjc_pVEDi4qsv5MtC2dMXzpIaDoRFLsxw',
    'AIzaSyDHQ9ipnphqTzDqZsbtd8_Ru4_kiKVQe2k'
]

# Per-client request "context" (sent verbatim in every request body) and
# API key (sent in the query string), keyed by client profile name.
# InnerTube.__init__ selects one of these by its `client` argument.
_default_clients = {
    'WEB': {
        'context': {
            'client': {
                'clientName': 'WEB',
                'clientVersion': '2.20200720.00.02'
            }
        },
        'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
    },
    'ANDROID': {
        'context': {
            'client': {
                'clientName': 'ANDROID',
                'clientVersion': '16.20'
            }
        },
        'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
    },
    'WEB_EMBED': {
        'context': {
            'client': {
                'clientName': 'WEB',
                'clientVersion': '2.20210721.00.00',
                'clientScreen': 'EMBED'
            }
        },
        'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
    },
    'ANDROID_EMBED': {
        'context': {
            'client': {
                'clientName': 'ANDROID',
                'clientVersion': '16.20',
                'clientScreen': 'EMBED'
            }
        },
        'api_key': 'AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'
    }
}

# Token lifetime in seconds. NOTE(review): not read anywhere in this module
# as far as visible here -- presumably used by callers; confirm before removal.
_token_timeout = 1800
# On-disk cache location for OAuth tokens, stored next to this module.
_cache_dir = pathlib.Path(__file__).parent.resolve() / '__cache__'
_token_file = os.path.join(_cache_dir, 'tokens.json')
class InnerTube:
    """Object for interacting with the innertube API.

    Wraps the undocumented ``youtubei/v1`` endpoints used by the YouTube
    web/Android clients, optionally authenticating via the OAuth device
    flow using the module-level "YouTube on TV" client credentials.
    """

    def __init__(self, client='ANDROID', use_oauth=False, allow_cache=True):
        """Initialize an InnerTube object.

        :param str client:
            Client to use for the object.
            Default to web because it returns the most playback types.
        :param bool use_oauth:
            Whether or not to authenticate to YouTube.
        :param bool allow_cache:
            Allows caching of oauth tokens on the machine.
        """
        # Request context and API key are looked up in the module-level
        # _default_clients table for the chosen client profile.
        self.context = _default_clients[client]['context']
        self.api_key = _default_clients[client]['api_key']
        self.access_token = None
        self.refresh_token = None
        self.use_oauth = use_oauth
        self.allow_cache = allow_cache
        # Stored as epoch time
        self.expires = None

        # Try to load from file if specified
        if self.use_oauth and self.allow_cache:
            # Try to load from file if possible
            if os.path.exists(_token_file):
                with open(_token_file) as f:
                    data = json.load(f)
                    self.access_token = data['access_token']
                    self.refresh_token = data['refresh_token']
                    self.expires = data['expires']
                    # Refresh immediately in case the cached token is stale.
                    self.refresh_bearer_token()

    def cache_tokens(self):
        """Cache tokens to file if allowed.

        Writes access/refresh tokens and their absolute expiry epoch to
        the module-level ``_token_file``; no-op when caching is disabled.
        """
        if not self.allow_cache:
            return

        data = {
            'access_token': self.access_token,
            'refresh_token': self.refresh_token,
            'expires': self.expires
        }
        # Create the cache directory lazily on first write.
        if not os.path.exists(_cache_dir):
            os.mkdir(_cache_dir)
        with open(_token_file, 'w') as f:
            json.dump(data, f)

    def refresh_bearer_token(self, force=False):
        """Refreshes the OAuth token if necessary.

        :param bool force:
            Force-refresh the bearer token.
        """
        if not self.use_oauth:
            return
        # Skip refresh if it's not necessary and not forced
        # NOTE(review): self.expires may still be None here if no token was
        # ever fetched or cached, which would raise TypeError on the
        # comparison below -- confirm all call paths set expires first.
        if self.expires > time.time() and not force:
            return

        # Subtracting 30 seconds is arbitrary to avoid potential time discrepencies
        start_time = int(time.time() - 30)
        data = {
            'client_id': _client_id,
            'client_secret': _client_secret,
            'grant_type': 'refresh_token',
            'refresh_token': self.refresh_token
        }
        response = request._execute_request(
            'https://oauth2.googleapis.com/token',
            'POST',
            headers={
                'Content-Type': 'application/json'
            },
            data=data
        )
        response_data = json.loads(response.read())

        self.access_token = response_data['access_token']
        # The endpoint returns a relative lifetime (expires_in); convert it
        # to an absolute epoch timestamp for later comparison.
        self.expires = start_time + response_data['expires_in']
        self.cache_tokens()

    def fetch_bearer_token(self):
        """Fetch an OAuth token.

        Runs the interactive OAuth device-code flow: prints a verification
        URL and user code, blocks on stdin until the user confirms, then
        exchanges the device code for access/refresh tokens.
        """
        # Subtracting 30 seconds is arbitrary to avoid potential time discrepencies
        start_time = int(time.time() - 30)
        data = {
            'client_id': _client_id,
            'scope': 'https://www.googleapis.com/auth/youtube'
        }
        response = request._execute_request(
            'https://oauth2.googleapis.com/device/code',
            'POST',
            headers={
                'Content-Type': 'application/json'
            },
            data=data
        )
        response_data = json.loads(response.read())
        verification_url = response_data['verification_url']
        user_code = response_data['user_code']
        print(f'Please open {verification_url} and input code {user_code}')
        input('Press enter when you have completed this step.')

        data = {
            'client_id': _client_id,
            'client_secret': _client_secret,
            'device_code': response_data['device_code'],
            'grant_type': 'urn:ietf:params:oauth:grant-type:device_code'
        }
        response = request._execute_request(
            'https://oauth2.googleapis.com/token',
            'POST',
            headers={
                'Content-Type': 'application/json'
            },
            data=data
        )
        response_data = json.loads(response.read())

        self.access_token = response_data['access_token']
        self.refresh_token = response_data['refresh_token']
        self.expires = start_time + response_data['expires_in']
        self.cache_tokens()

    @property
    def base_url(self):
        """Return the base url endpoint for the innertube API."""
        return 'https://www.youtube.com/youtubei/v1'

    @property
    def base_data(self):
        """Return the base json data to transmit to the innertube API."""
        return {
            'context': self.context
        }

    @property
    def base_params(self):
        """Return the base query parameters to transmit to the innertube API."""
        return {
            'key': self.api_key,
            'contentCheckOk': True,
            'racyCheckOk': True
        }

    def _call_api(self, endpoint, query, data):
        """Make a request to a given endpoint with the provided query parameters and data.

        :param str endpoint: Full endpoint URL (without query string).
        :param dict query: Query-string parameters.
        :param dict data: JSON body to POST.
        :rtype: dict
        :returns: Parsed JSON response.
        """
        # Remove the API key if oauth is being used.
        # NOTE(review): this mutates the caller-supplied query dict in place.
        if self.use_oauth:
            del query['key']

        endpoint_url = f'{endpoint}?{parse.urlencode(query)}'
        headers = {
            'Content-Type': 'application/json',
        }
        # Add the bearer token if applicable
        if self.use_oauth:
            if self.access_token:
                # Have a token already: refresh it if stale, then use it.
                self.refresh_bearer_token()
                headers['Authorization'] = f'Bearer {self.access_token}'
            else:
                # No token yet: run the interactive device flow.
                self.fetch_bearer_token()
                headers['Authorization'] = f'Bearer {self.access_token}'

        response = request._execute_request(
            endpoint_url,
            'POST',
            headers=headers,
            data=data
        )
        return json.loads(response.read())

    def browse(self):
        """Make a request to the browse endpoint.

        TODO: Figure out how we can use this
        """
        # endpoint = f'{self.base_url}/browse'  # noqa:E800
        ...
        # return self._call_api(endpoint, query, self.base_data)  # noqa:E800

    def config(self):
        """Make a request to the config endpoint.

        TODO: Figure out how we can use this
        """
        # endpoint = f'{self.base_url}/config'  # noqa:E800
        ...
        # return self._call_api(endpoint, query, self.base_data)  # noqa:E800

    def guide(self):
        """Make a request to the guide endpoint.

        TODO: Figure out how we can use this
        """
        # endpoint = f'{self.base_url}/guide'  # noqa:E800
        ...
        # return self._call_api(endpoint, query, self.base_data)  # noqa:E800

    def next(self):
        """Make a request to the next endpoint.

        TODO: Figure out how we can use this
        """
        # endpoint = f'{self.base_url}/next'  # noqa:E800
        ...
        # return self._call_api(endpoint, query, self.base_data)  # noqa:E800

    def player(self, video_id):
        """Make a request to the player endpoint.

        :param str video_id:
            The video id to get player info for.
        :rtype: dict
        :returns:
            Raw player info results.
        """
        endpoint = f'{self.base_url}/player'
        query = {
            'videoId': video_id,
        }
        query.update(self.base_params)
        return self._call_api(endpoint, query, self.base_data)

    def search(self, search_query, continuation=None):
        """Make a request to the search endpoint.

        :param str search_query:
            The query to search.
        :param continuation:
            (optional) Continuation token from a previous search response,
            used to fetch the next page of results.
        :rtype: dict
        :returns:
            Raw search query results.
        """
        endpoint = f'{self.base_url}/search'
        query = {
            'query': search_query
        }
        query.update(self.base_params)
        data = {}
        if continuation:
            data['continuation'] = continuation
        data.update(self.base_data)
        return self._call_api(endpoint, query, data)

    def verify_age(self, video_id):
        """Make a request to the age_verify endpoint.

        Notable examples of the types of video this verification step is for:
        * https://www.youtube.com/watch?v=QLdAhwSBZ3w
        * https://www.youtube.com/watch?v=hc0ZDaAZQT0

        :param str video_id:
            The video id to get player info for.
        :rtype: dict
        :returns:
            Returns information that includes a URL for bypassing certain restrictions.
        """
        endpoint = f'{self.base_url}/verify_age'
        data = {
            'nextEndpoint': {
                'urlEndpoint': {
                    'url': f'/watch?v={video_id}'
                }
            },
            'setControvercy': True
        }
        data.update(self.base_data)
        result = self._call_api(endpoint, self.base_params, data)
        return result

    def get_transcript(self, video_id):
        """Make a request to the get_transcript endpoint.

        This is likely related to captioning for videos, but is currently untested.

        :param str video_id:
            The video id to get transcript info for.
        :rtype: dict
        :returns: Raw transcript endpoint results.
        """
        endpoint = f'{self.base_url}/get_transcript'
        query = {
            'videoId': video_id,
        }
        query.update(self.base_params)
        result = self._call_api(endpoint, query, self.base_data)
        return result
|
unlicense
|
ba0aec650f36c17aa80c51e0edb576d3
| 31.473538
| 93
| 0.556613
| 3.81979
| false
| false
| false
| false
|
pytube/pytube
|
pytube/contrib/playlist.py
|
1
|
14204
|
"""Module to download a complete playlist from a youtube channel."""
import json
import logging
from collections.abc import Sequence
from datetime import date, datetime
from typing import Dict, Iterable, List, Optional, Tuple, Union
from pytube import extract, request, YouTube
from pytube.helpers import cache, DeferredGeneratorList, install_proxy, uniqueify
logger = logging.getLogger(__name__)
class Playlist(Sequence):
    """Load a YouTube playlist with URL"""

    def __init__(self, url: str, proxies: Optional[Dict[str, str]] = None):
        """Create a playlist wrapper for *url*.

        :param str url: A YouTube playlist (or watch-with-list) URL.
        :param dict proxies: (optional) proxy mapping passed to install_proxy.
        """
        if proxies:
            install_proxy(proxies)

        self._input_url = url

        # These need to be initialized as None for the properties.
        self._html = None
        self._ytcfg = None
        self._initial_data = None
        self._sidebar_info = None

        self._playlist_id = None

    @property
    def playlist_id(self):
        """Get the playlist id.

        :rtype: str
        """
        if self._playlist_id:
            return self._playlist_id
        self._playlist_id = extract.playlist_id(self._input_url)
        return self._playlist_id

    @property
    def playlist_url(self):
        """Get the base playlist url.

        :rtype: str
        """
        return f"https://www.youtube.com/playlist?list={self.playlist_id}"

    @property
    def html(self):
        """Get the playlist page html.

        :rtype: str
        """
        if self._html:
            return self._html
        self._html = request.get(self.playlist_url)
        return self._html

    @property
    def ytcfg(self):
        """Extract the ytcfg from the playlist page html.

        :rtype: dict
        """
        if self._ytcfg:
            return self._ytcfg
        self._ytcfg = extract.get_ytcfg(self.html)
        return self._ytcfg

    @property
    def initial_data(self):
        """Extract the initial data from the playlist page html.

        :rtype: dict
        """
        if self._initial_data:
            return self._initial_data
        else:
            self._initial_data = extract.initial_data(self.html)
            return self._initial_data

    @property
    def sidebar_info(self):
        """Extract the sidebar info from the playlist page html.

        :rtype: dict
        """
        if self._sidebar_info:
            return self._sidebar_info
        else:
            self._sidebar_info = self.initial_data['sidebar'][
                'playlistSidebarRenderer']['items']
            return self._sidebar_info

    @property
    def yt_api_key(self):
        """Extract the INNERTUBE_API_KEY from the playlist ytcfg.

        :rtype: str
        """
        return self.ytcfg['INNERTUBE_API_KEY']

    def _paginate(
        self, until_watch_id: Optional[str] = None
    ) -> Iterable[List[str]]:
        """Parse the video links from the page source, yields the /watch?v=
        part from video link

        :param until_watch_id Optional[str]: YouTube Video watch id until
            which the playlist should be read.

        :rtype: Iterable[List[str]]
        :returns: Iterable of lists of YouTube watch ids
        """
        videos_urls, continuation = self._extract_videos(
            json.dumps(extract.initial_data(self.html))
        )
        if until_watch_id:
            try:
                trim_index = videos_urls.index(f"/watch?v={until_watch_id}")
                yield videos_urls[:trim_index]
                return
            except ValueError:
                pass
        yield videos_urls

        # Extraction from a playlist only returns 100 videos at a time
        # if self._extract_videos returns a continuation there are more
        # than 100 songs inside a playlist, so we need to add further requests
        # to gather all of them
        if continuation:
            load_more_url, headers, data = self._build_continuation_url(continuation)
        else:
            load_more_url, headers, data = None, None, None

        while load_more_url and headers and data:  # there is an url found
            logger.debug("load more url: %s", load_more_url)
            # requesting the next page of videos with the url generated from the
            # previous page, needs to be a post
            req = request.post(load_more_url, extra_headers=headers, data=data)
            # extract up to 100 songs from the page loaded
            # returns another continuation if more videos are available
            videos_urls, continuation = self._extract_videos(req)
            if until_watch_id:
                try:
                    trim_index = videos_urls.index(f"/watch?v={until_watch_id}")
                    yield videos_urls[:trim_index]
                    return
                except ValueError:
                    pass
            yield videos_urls

            if continuation:
                load_more_url, headers, data = self._build_continuation_url(
                    continuation
                )
            else:
                load_more_url, headers, data = None, None, None

    def _build_continuation_url(self, continuation: str) -> Tuple[str, dict, dict]:
        """Helper method to build the url and headers required to request
        the next page of videos

        :param str continuation: Continuation extracted from the json response
            of the last page
        :rtype: Tuple[str, dict, dict]
        :returns: Tuple of an url and required headers for the next http
            request
        """
        return (
            (
                # was changed to this format (and post requests)
                # between 2021.03.02 and 2021.03.03
                "https://www.youtube.com/youtubei/v1/browse?key="
                f"{self.yt_api_key}"
            ),
            {
                "X-YouTube-Client-Name": "1",
                "X-YouTube-Client-Version": "2.20200720.00.02",
            },
            # extra data required for post request
            {
                "continuation": continuation,
                "context": {
                    "client": {
                        "clientName": "WEB",
                        "clientVersion": "2.20200720.00.02"
                    }
                }
            }
        )

    @staticmethod
    def _extract_videos(raw_json: str) -> Tuple[List[str], Optional[str]]:
        """Extracts videos from a raw json page

        :param str raw_json: Input json extracted from the page or the last
            server response
        :rtype: Tuple[List[str], Optional[str]]
        :returns: Tuple containing a list of up to 100 video watch ids and
            a continuation token, if more videos are available
        """
        initial_data = json.loads(raw_json)
        try:
            # this is the json tree structure, if the json was extracted from
            # html
            section_contents = initial_data["contents"][
                "twoColumnBrowseResultsRenderer"][
                "tabs"][0]["tabRenderer"]["content"][
                "sectionListRenderer"]["contents"]
            try:
                # Playlist without submenus
                important_content = section_contents[
                    0]["itemSectionRenderer"][
                    "contents"][0]["playlistVideoListRenderer"]
            except (KeyError, IndexError, TypeError):
                # Playlist with submenus
                important_content = section_contents[
                    1]["itemSectionRenderer"][
                    "contents"][0]["playlistVideoListRenderer"]
            videos = important_content["contents"]
        except (KeyError, IndexError, TypeError):
            try:
                # this is the json tree structure, if the json was directly sent
                # by the server in a continuation response
                # no longer a list and no longer has the "response" key
                important_content = initial_data['onResponseReceivedActions'][0][
                    'appendContinuationItemsAction']['continuationItems']
                videos = important_content
            except (KeyError, IndexError, TypeError) as p:
                logger.info(p)
                return [], None

        try:
            continuation = videos[-1]['continuationItemRenderer'][
                'continuationEndpoint'
            ]['continuationCommand']['token']
            videos = videos[:-1]
        except (KeyError, IndexError):
            # if there is an error, no continuation is available
            continuation = None

        # remove duplicates
        return (
            uniqueify(
                list(
                    # only extract the video ids from the video data
                    map(
                        lambda x: (
                            f"/watch?v="
                            f"{x['playlistVideoRenderer']['videoId']}"
                        ),
                        videos
                    )
                ),
            ),
            continuation,
        )

    def trimmed(self, video_id: str) -> Iterable[str]:
        """Retrieve a list of YouTube video URLs trimmed at the given video ID

        i.e. if the playlist has video IDs 1,2,3,4 calling trimmed(3) returns
        [1,2]

        :type video_id: str
            video ID to trim the returned list of playlist URLs at
        :rtype: List[str]
        :returns:
            List of video URLs from the playlist trimmed at the given ID
        """
        for page in self._paginate(until_watch_id=video_id):
            yield from (self._video_url(watch_path) for watch_path in page)

    def url_generator(self):
        """Generator that yields video URLs.

        :Yields: Video URLs
        """
        for page in self._paginate():
            for video in page:
                yield self._video_url(video)

    @property  # type: ignore
    @cache
    def video_urls(self) -> DeferredGeneratorList:
        """Complete links of all the videos in playlist

        :rtype: List[str]
        :returns: List of video URLs
        """
        return DeferredGeneratorList(self.url_generator())

    def videos_generator(self):
        """Generator that yields a YouTube object per playlist video URL."""
        for url in self.video_urls:
            yield YouTube(url)

    @property
    def videos(self) -> Iterable[YouTube]:
        """Yields YouTube objects of videos in this playlist

        :rtype: List[YouTube]
        :returns: List of YouTube
        """
        return DeferredGeneratorList(self.videos_generator())

    def __getitem__(self, i: Union[slice, int]) -> Union[str, List[str]]:
        return self.video_urls[i]

    def __len__(self) -> int:
        return len(self.video_urls)

    def __repr__(self) -> str:
        return f"{repr(self.video_urls)}"

    @property
    @cache
    def last_updated(self) -> Optional[date]:
        """Extract the date that the playlist was last updated.

        For some playlists, this will be a specific date, which is returned as a datetime
        object. For other playlists, this is an estimate such as "1 week ago". Due to the
        fact that this value is returned as a string, pytube does a best-effort parsing
        where possible, and returns the raw string where it is not possible.

        :return: Date of last playlist update where possible, else the string provided
        :rtype: datetime.date
        """
        last_updated_text = self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
            'stats'][2]['runs'][1]['text']
        try:
            date_components = last_updated_text.split()
            month = date_components[0]
            day = date_components[1].strip(',')
            year = date_components[2]
            return datetime.strptime(
                f"{month} {day:0>2} {year}", "%b %d %Y"
            ).date()
        except (IndexError, KeyError, ValueError):
            # ValueError added: relative estimates such as "1 week ago" do not
            # match the "%b %d %Y" format and strptime raises ValueError; the
            # documented fallback is to return the raw string.
            return last_updated_text

    @property
    @cache
    def title(self) -> Optional[str]:
        """Extract playlist title

        :return: playlist title (name)
        :rtype: Optional[str]
        """
        return self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
            'title']['runs'][0]['text']

    @property
    def description(self) -> str:
        """Extract the playlist description text from the sidebar.

        :rtype: str
        """
        return self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
            'description']['simpleText']

    @property
    def length(self):
        """Extract the number of videos in the playlist.

        :return: Playlist video count
        :rtype: int
        """
        count_text = self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
            'stats'][0]['runs'][0]['text']
        count_text = count_text.replace(',', '')
        return int(count_text)

    @property
    def views(self):
        """Extract view count for playlist.

        :return: Playlist view count
        :rtype: int
        """
        # "1,234,567 views"
        views_text = self.sidebar_info[0]['playlistSidebarPrimaryInfoRenderer'][
            'stats'][1]['simpleText']
        # "1,234,567"
        count_text = views_text.split()[0]
        # "1234567"
        count_text = count_text.replace(',', '')
        return int(count_text)

    @property
    def owner(self):
        """Extract the owner of the playlist.

        :return: Playlist owner name.
        :rtype: str
        """
        return self.sidebar_info[1]['playlistSidebarSecondaryInfoRenderer'][
            'videoOwner']['videoOwnerRenderer']['title']['runs'][0]['text']

    @property
    def owner_id(self):
        """Extract the channel_id of the owner of the playlist.

        :return: Playlist owner's channel ID.
        :rtype: str
        """
        return self.sidebar_info[1]['playlistSidebarSecondaryInfoRenderer'][
            'videoOwner']['videoOwnerRenderer']['title']['runs'][0][
            'navigationEndpoint']['browseEndpoint']['browseId']

    @property
    def owner_url(self):
        """Create the channel url of the owner of the playlist.

        :return: Playlist owner's channel url.
        :rtype: str
        """
        return f'https://www.youtube.com/channel/{self.owner_id}'

    @staticmethod
    def _video_url(watch_path: str):
        """Turn a "/watch?v=..." path into an absolute YouTube URL."""
        return f"https://www.youtube.com{watch_path}"
|
unlicense
|
a750825a6ad36ac1a7782e70397c068b
| 32.899761
| 89
| 0.558857
| 4.542373
| false
| false
| false
| false
|
mozilla-iam/cis
|
python-modules/cis_crypto/cis_crypto/cli.py
|
1
|
3035
|
#!/usr/bin/env python3
import argparse
import jose
import logging
import sys
from cis_crypto import common
from cis_crypto import operation
class cli:
    """Command line front end for cis_crypto JWKS sign/verify operations."""

    def __init__(self):
        # Parsed argparse namespace; populated by run().
        self.config = None
        # Program name without its directory prefix (for display purposes).
        self.prog = sys.argv[0].split("/")[-1]

    def parse_args(self, args):
        """Build the argument parser and parse *args*.

        :param list args: argv-style argument list (without the program name).
        :return: argparse.Namespace with ``func`` and ``file`` attributes.
        """
        parser = argparse.ArgumentParser(
            description="""
            Command line wrapper for mozilla-iam sign verify/operations of JSON and YAML using JWKS.
            """
        )
        subparsers = parser.add_subparsers(dest="cryptographic-operation")
        subparsers.required = True
        sign_operation_parser = subparsers.add_parser(
            "sign", help="Use a jwks key to generate a signature for a file. (Assumes a json or yaml file)"
        )
        sign_operation_parser.add_argument(
            "--file", help="The path to the file you would like to sign. (Assumes a json or yaml file)"
        )
        sign_operation_parser.set_defaults(func="sign_operation")
        verify_operation_parser = subparsers.add_parser(
            # Fixed typo: "signture" -> "signature" in user-facing help text.
            "verify", help="Verify a signature with a known file. (Assumes a json file)"
        )
        verify_operation_parser.add_argument("--file", help="The path to the file you would like to sign.")
        verify_operation_parser.set_defaults(func="verify_operation")
        return parser.parse_args(args)

    def run(self):
        """Entry point: parse sys.argv and dispatch to sign or verify.

        Always terminates the process via sys.exit().
        """
        logger = logging.getLogger(__name__)
        self.config = self.parse_args(sys.argv[1:])
        if self.config.func == "sign_operation":
            logger.info("Attempting to sign file: {}".format(self.config.file))
            file_content = common.load_file(self.config.file)
            signing_object = operation.Sign()
            signing_object.load(file_content)
            jws = signing_object.jws()
            common.write_file(jws, "{}.jws".format(self.config.file))
            logger.info("File signed. Your signed file is now: {}.jws".format(self.config.file))
            logger.info("To verify this file use cis_crypto verify --file {}.jws".format(self.config.file))
        elif self.config.func == "verify_operation":
            logger.info("Attempting verification of signature for file: {}".format(self.config.file))
            everett_config = common.get_config()
            logger.info(
                "Attempting fetch of .well-known data from: {}".format(
                    everett_config("public_key_name", namespace="cis", default="access-file-key.pub.pem")
                )
            )
            file_content = common.load_file(self.config.file)
            verify_object = operation.Verify()
            verify_object.load(file_content)
            try:
                # Raises jose.exceptions.JWSError if the signature is invalid;
                # the return value is not needed (removed unused local).
                verify_object.jws()
                logger.info("Signature verified for file: {}".format(self.config.file))
            except jose.exceptions.JWSError:
                logger.error("The signature could not be verified.")
                sys.exit()
        sys.exit()
|
mpl-2.0
|
6e4ea185859404dd312ca4ca6712d897
| 40.013514
| 107
| 0.604942
| 4.062918
| false
| true
| false
| false
|
mozilla-iam/cis
|
python-modules/cis_notifications/cis_notifications/event.py
|
1
|
5547
|
import logging
import time
import requests
from cis_notifications import common
from cis_notifications import secret
logger = logging.getLogger(__name__)
def expired(ts, leeway=0):
    """Return True when timestamp *ts* is in the past, within *leeway* seconds.

    :param float ts: Epoch timestamp to test.
    :param float leeway: Seconds of slack added to the current time.
    :rtype: bool
    """
    deadline = time.time() + leeway
    return ts < deadline
class Event(object):
"""Handle events from lambda and generate hooks out to publishers."""
def __init__(self, event):
"""[summary]
Arguments:
object {[type]} -- [an instance of the event class.]
event {[type]} -- [the event as ingested from the kinesis stream.]
subscriptions {[type]} -- [list of urls to post notifications to.]
"""
self.config = common.get_config()
self.event = event
self.secret_manager = secret.Manager()
self.access_token = None
def to_notification(self):
"""[summary]
Transform the instance of the event from the stream into a notification payload.
[return] JSON data structure to send using requests.
"""
logger.debug("An event was received", extra={"event": self.event})
updated_record = self.event.get("dynamodb")
operation = "foxy" # Just a place holder in case we have an unhandled event.
if self.event.get("eventName") == "INSERT":
operation = "create"
if self.event.get("eventName") == "MODIFY":
operation = "update"
if self.event.get("eventName") == "REMOVE":
operation = "delete"
if updated_record is not None:
# Provided the event is the structure that
notification = {
"operation": operation,
"id": updated_record["Keys"]["id"]["S"],
"time": updated_record["ApproximateCreationDateTime"],
}
logger.debug("Notification generated.", extra={"notification": notification})
return notification
else:
logger.debug("No notification generated.")
return {}
def send(self, notification):
"""[summary]
Get the list of notification endpoints from the object constructor and send a POST with the json payload.
Arguments:
object {[type]} -- [an instance of the event class.]
object {[notification]} -- [A json payload that you would like to send to the RP.]
[return] Dictionary of status codes by publisher.
"""
# Not in-memory access token?
if not self.access_token:
# Load whatever is in our secrets
self.access_token_dict = self.secret_manager.secretmgr("az_access_token")
# Check if what we had in secrets is still valid!
# This includes 10s leeway for clock sync issues and 15min (900s) for max-lambda function time.
# Since tokens are normally valid for 86400s (1 day) that should accomodate for all cases. If these were to
# be less than 15min for any reason, it would simply bypass the cache
if expired(float(self.access_token_dict.get("exp", 0.0)), leeway=910):
logger.info("Access token has expired, refreshing")
authzero = self._get_authzero_client()
self.access_token_dict = authzero.exchange_for_access_token()
# Auth0 gives us the difference (expires_in) not a time stamp, so we need to calculate when the token
# expires.
self.access_token_dict["exp"] = time.time() + float(self.access_token_dict.get("expires_in", 60.0))
self.secret_manager.secretmgr_store("az_access_token", self.access_token_dict)
else:
logger.info("Re-using cached access token")
self.access_token = self.access_token_dict["access_token"]
if notification != {}:
rp_urls = self.config(
"rp_urls", namespace="cis", default="https://dinopark.k8s.dev.sso.allizom.org/events/update"
)
results = {}
for url in rp_urls.split(","):
result = self._notify_via_post(url, notification, self.access_token)
results[url] = result
return results
def _get_authzero_client(self):
    """Construct an AuthZero client from stored secrets and CIS configuration."""
    # Pull credentials from the secret manager and endpoints from config,
    # then hand everything to the AuthZero helper.
    client_id = self.secret_manager.secret("client_id")
    client_secret = self.secret_manager.secret("client_secret")
    api_identifier = self.config("api_identifier", namespace="cis", default="hook.dev.sso.allizom.org")
    tenant = self.config("authzero_tenant", namespace="cis", default="auth.mozilla.auth0.com")
    return secret.AuthZero(
        client_id=client_id,
        client_secret=client_secret,
        api_identifier=api_identifier,
        authzero_tenant=tenant,
    )
def _notify_via_post(self, url, json_payload, access_token):
    """POST *json_payload* to a single publisher/RP endpoint.

    :param url: the url of the publisher you would like to notify.
    :param json_payload: the event to send to the publisher.
    :param access_token: bearer token used for the Authorization header.
    :return: the HTTP status code on success, or an error-name string
        ("HTTPError", "ConnectionError", "Timeout", "Unknown") on failure.
    """
    try:
        response = requests.post(
            url, json=json_payload, headers={"authorization": "Bearer {}".format(access_token)}
        )
        return response.status_code
    # BUG FIX: RequestException is the base class of the exceptions below,
    # so it must be handled LAST — previously it was listed first, making
    # the HTTPError/ConnectionError/Timeout handlers unreachable.
    except requests.exceptions.HTTPError:
        return "HTTPError"
    except requests.exceptions.ConnectionError:
        return "ConnectionError"
    except requests.exceptions.Timeout:
        return "Timeout"
    except requests.exceptions.RequestException:
        return "Unknown"
|
mpl-2.0
|
0d336f640d1dc4f3cb7fa7d1034befed
| 38.340426
| 119
| 0.597801
| 4.381517
| false
| false
| false
| false
|
mozilla-iam/cis
|
python-modules/cis_profile_retrieval_service/cis_profile_retrieval_service/schema.py
|
1
|
1800
|
import json
import graphene
import cis_profile.graphene
from cis_identity_vault.models import user
from cis_profile_retrieval_service.common import get_table_resource
def is_json(payload):
    """Return True when *payload* parses as JSON, otherwise False.

    Non-string inputs (e.g. None) are treated as not-JSON rather than
    raising, via the TypeError branch.
    """
    try:
        json.loads(payload)
        return True
    except (TypeError, ValueError):
        return False
class Query(graphene.ObjectType):
    """GraphQL Query class for the V2 Profiles."""

    profiles = graphene.List(cis_profile.graphene.Profile, primaryEmail=graphene.String(required=False))
    profile = graphene.Field(cis_profile.graphene.Profile, userId=graphene.String(required=True))

    def resolve_profiles(self, info, **kwargs):
        """GraphQL resolver for the profiles attribute.

        Returns the list of deserialized profile dicts, filtered by
        ``primaryEmail`` when supplied, otherwise all profiles in the vault.
        """
        table = get_table_resource()
        vault = user.Profile(table)
        profiles = []
        if kwargs.get("primaryEmail"):
            search = vault.find_by_email(kwargs.get("primaryEmail"))
            if len(search.get("Items")) > 0:
                for profile in search.get("Items"):
                    # BUG FIX: json.loads() was called with no argument, which
                    # raises TypeError.  Deserialize the item's "profile" field,
                    # mirroring resolve_profile below.
                    profiles.append(json.loads(profile.get("profile")))
        else:
            for vault_profile in vault.all:
                profiles.append(json.loads(vault_profile.get("profile")))
        # BUG FIX: the resolver previously fell off the end and returned None,
        # so the GraphQL field was always null.
        return profiles

    def resolve_profile(self, info, **kwargs):
        """GraphQL resolver for a single profile, looked up by userId."""
        table = get_table_resource()
        vault = user.Profile(table)
        if kwargs.get("userId"):
            search = vault.find_by_id(kwargs.get("userId"))
            if len(search.get("Items")) > 0:
                resp = search["Items"][0]["profile"]
            else:
                resp = json.dumps({})
        return resp
class AuthorizationMiddleware:
    """Graphene middleware hook for per-resolver authorization.

    Currently a pure pass-through: no authorization checks are performed
    and every resolution is delegated unchanged.
    """

    def resolve(self, next, root, info, **kwargs):
        # Delegate directly to the next resolver in the chain.
        return next(root, info, **kwargs)
|
mpl-2.0
|
2d73a38b2f7944d0edc6af411dde9e66
| 32.333333
| 104
| 0.618889
| 4.026846
| false
| false
| false
| false
|
mozilla-iam/cis
|
python-modules/cis_crypto/setup.py
|
1
|
1388
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""setuptools packaging script for the ``cis_crypto`` module of mozilla-iam CIS."""
from setuptools import setup, find_packages

# The long description shown on the package index is taken verbatim
# from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()

# Runtime dependencies.
requirements = [
    "python-jose[cryptography]",
    "cryptography",
    "everett",
    "everett[ini]",
    "configobj",
    "boto3",
    "boto",
    "botocore",
    "requests",
    "pyaml",
]

# Build-time and test-time dependencies; the "test" extra exposes the
# test requirements for `pip install cis_crypto[test]`.
setup_requirements = ["pytest-runner", "setuptools>=40.5.0"]
test_requirements = ["pytest", "pytest-watch", "pytest-cov", "pytest-mock", "moto", "mock", "flake8", "cis_profile"]
extras = {"test": test_requirements}

setup(
    name="cis_crypto",
    version="0.0.1",
    author="Andrew Krug",
    author_email="akrug@mozilla.com",
    description="Per attribute signature system for jwks sign-verify in mozilla-iam.",
    long_description=long_description,
    url="https://github.com/mozilla-iam/cis",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Mozilla Public License",
        "Operating System :: OS Independent",
    ],
    install_requires=requirements,
    license="Mozilla Public License 2.0",
    include_package_data=True,
    packages=find_packages(include=["cis_crypto", "bin"]),
    scripts=["bin/cis_crypto"],
    setup_requires=setup_requirements,
    test_suite="tests",
    tests_require=test_requirements,
    extras_require=extras,
    zip_safe=False,
)
|
mpl-2.0
|
1c316fc1356dd8d975ed5651133bf6c5
| 26.215686
| 116
| 0.64121
| 3.435644
| false
| true
| false
| false
|
ibm-watson-iot/iot-python
|
test/test_api_registry_devicetypes.py
|
2
|
6161
|
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
import uuid
import pytest
import testUtils
from wiotp.sdk.api.registry.devices import DeviceInfo
from wiotp.sdk.exceptions import ApiException
class TestRegistryDevicetypes(testUtils.AbstractTest):
    """Integration tests for device-type CRUD against the WIoTP registry API.

    The ``deviceType`` and ``device`` fixtures are supplied by the test
    harness and represent a pre-registered device type and device.
    """

    # =========================================================================
    # Device Type tests
    # =========================================================================
    def testDeviceTypeExistsCheck(self, deviceType):
        # Known type is reported present; unknown type absent.
        assert deviceType.id in self.appClient.registry.devicetypes
        assert "doesntexist" not in self.appClient.registry.devicetypes

    def testGetDeviceType(self, deviceType):
        retrievedDeviceType = self.appClient.registry.devicetypes[deviceType.id]
        assert retrievedDeviceType.id == deviceType.id
        assert retrievedDeviceType.classId == "Device"

    def testGetDeviceTypeThatDoesntExist(self):
        with pytest.raises(Exception):
            self.appClient.registry.devicetypes["doesntexist"]

    def testUnsupportedCreateUpdate(self):
        # Item assignment on the devicetypes collection is unsupported.
        with pytest.raises(Exception):
            self.appClient.registry.devicetypes["d:hldtxx:vm:iot-test-06"] = {"foo", "bar"}

    def testListDeviceTypes(self, deviceType):
        # Verify the collection is iterable; stop after a bounded number.
        count = 0
        for type in self.appClient.registry.devicetypes:
            count += 1
            if count > 10:
                break

    # DeviceTypeDescription test
    def testCreateDeviceType(self):
        typeId = str(uuid.uuid4())
        self.appClient.registry.devicetypes.create({"id": typeId, "description": "This is a test"})
        myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
        assert myDeviceTypeRetrieved.id == typeId
        assert myDeviceTypeRetrieved.description == "This is a test"
        del self.appClient.registry.devicetypes[typeId]

    def testCreateDeviceTypeNone(self):
        typeId = str(uuid.uuid4())
        self.appClient.registry.devicetypes.create({"id": typeId, "description": None})
        myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
        assert myDeviceTypeRetrieved.id == typeId
        assert myDeviceTypeRetrieved.description is None
        del self.appClient.registry.devicetypes[typeId]

    # Metadata test
    def testCreateDeviceMetadata(self):
        typeId = str(uuid.uuid4())
        self.appClient.registry.devicetypes.create(
            {"id": typeId, "description": "This is still a test", "metadata": {"test": "test"}}
        )
        myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
        assert myDeviceTypeRetrieved.id == typeId
        assert myDeviceTypeRetrieved.description == "This is still a test"
        assert myDeviceTypeRetrieved.metadata == {"test": "test"}
        del self.appClient.registry.devicetypes[typeId]

    def testCreateDeviceMetadataNone(self):
        typeId = str(uuid.uuid4())
        self.appClient.registry.devicetypes.create(
            {"id": typeId, "description": "This is still a test", "metadata": None}
        )
        myDeviceTypeRetrieved = self.appClient.registry.devicetypes[typeId]
        assert myDeviceTypeRetrieved.id == typeId
        assert myDeviceTypeRetrieved.description == "This is still a test"
        assert myDeviceTypeRetrieved.metadata is None
        del self.appClient.registry.devicetypes[typeId]

    def testUpdateDeviceType(self, deviceType):
        self.appClient.registry.devicetypes.update(deviceType.id, description="This is still a test")
        updatedDeviceType = self.appClient.registry.devicetypes[deviceType.id]
        assert updatedDeviceType.description == "This is still a test"

    def testUpdateDeviceInfo(self, deviceType):
        self.appClient.registry.devicetypes.update(deviceType.id, deviceInfo=DeviceInfo(serialNumber="111"))
        updatedDeviceType = self.appClient.registry.devicetypes[deviceType.id]
        assert updatedDeviceType.deviceInfo.serialNumber == "111"

    # =========================================================================
    # Device under DeviceType tests
    # =========================================================================
    def testDeviceExistsCheck(self, deviceType, device):
        assert device.deviceId in deviceType.devices
        assert "wheredidyago" not in deviceType.devices

    def testGetDeviceFromDeviceType(self, deviceType, device):
        # Lookup only; fails if the device cannot be retrieved via its type.
        myDevice = self.appClient.registry.devicetypes[deviceType.id].devices[device.deviceId]

    def testListDevicesFromDeviceType(self, deviceType, device):
        # Get a device, and cache the response in a local object
        count = 0
        for device in deviceType.devices:
            count += 1
            if count > 10:
                break

    # BUG FIX: the next two tests were named testCreateDeviceType and
    # testUpdateDeviceType, shadowing the earlier definitions of the same
    # names, so the happy-path create/update tests above never executed.
    # Renamed so that both variants run.
    def testCreateDeviceTypeInvalidPayload(self):
        with pytest.raises(ApiException):
            typeId = 1
            r = self.appClient.registry.devicetypes.create(typeId)

    def testUpdateDeviceTypeInvalidPayload(self):
        with pytest.raises(ApiException):
            data = None
            r = self.appClient.registry.devicetypes.update(data)

    def testDeleteTypeId(self, device, deviceType):
        typeId = str(uuid.uuid4())
        self.appClient.registry.devicetypes.create(
            {"id": typeId, "description": "This is still a test", "metadata": {"test": "test"}}
        )
        self.appClient.registry.devicetypes.delete(typeId)
        # NOTE(review): this checks membership against the fixture type's
        # devices, not the freshly deleted type — confirm intent upstream.
        assert typeId not in deviceType.devices
|
epl-1.0
|
03ebfc866c44dd5b025dafca891c805b
| 38
| 114
| 0.627333
| 4.388177
| false
| true
| false
| false
|
ibm-watson-iot/iot-python
|
samples/deviceFactory/deviceStatus.py
|
2
|
3130
|
#!/usr/bin/env python
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
import argparse
import sys
import os
import yaml
import wiotp.sdk.application
def loadConfigFile(source):
    """Parse the YAML file at *source* and return its deserialized contents."""
    with open(source, "r") as sourceFile:
        return yaml.full_load(sourceFile)
if __name__ == "__main__":
    # Initialize the properties we need
    parser = argparse.ArgumentParser(
        description="IBM Watson IoT Platform Device Status. For more information see https://github.com/ibm-watson-iot/iot-python/samples/deviceFactory"
    )
    parser.add_argument(
        "-t",
        "--typeId",
        required=False,
        default="iotpsutil",
        help="Set the typeId for the device batch. Defaults to iotpsutil",
    )
    parser.add_argument(
        "-b",
        "--batchId",
        required=True,
        help="DeviceIDs will be prefixed by the batch number, e.g. batchID-0001, batchID-0002",
    )
    parser.add_argument(
        "-n",
        "--numberOfDevices",
        required=True,
        type=int,
        help="How many device configuration files should be produced by the factory. Max value is 1000",
    )
    # parse_known_args so that WIoTP-related extra flags don't abort the run.
    args, unknown = parser.parse_known_args()

    # Application credentials come from environment variables.
    options = wiotp.sdk.application.parseEnvVars()
    client = wiotp.sdk.application.ApplicationClient(options)

    # Terminal colour mods (ANSI escape sequences: red/green/reset)
    red = "%c[31m" % chr(27)
    green = "%c[32m" % chr(27)
    off = "%c[0m" % chr(27)

    # Collect the connection status of every device of the requested type,
    # keeping only devices whose deviceId belongs to the requested batch.
    statuses = client.registry.connectionStatus.find(typeId=args.typeId)
    output = {}
    for status in statuses:
        # print(status)
        clientId = status["id"]
        deviceId = clientId.split(":")[3]
        if not deviceId.startswith(args.batchId):
            continue
        # NOTE(review): assumes clientId contains exactly one "-" (in the
        # batch suffix); more hyphens would make this unpack raise — confirm.
        (batchId, batchNum) = clientId.split("-")
        # Colour-code each batch number by its connection state.
        if status["connectionStatus"] == "disconnected":
            output[batchNum] = "%s%s%s" % (red, batchNum, off)
        elif status["connectionStatus"] == "connected":
            output[batchNum] = "%s%s%s" % (green, batchNum, off)
        else:
            output[batchNum] = "%s" % (batchNum)

    print("=================================================")
    print("Device Connection State Report")
    print("")
    print("%s:%s-x" % (args.typeId, args.batchId))
    print("")
    print("%sconnected%s / %sdisconnected%s / unknown" % (green, off, red, off))
    print("=================================================")

    # Render every expected batch number 0001..N, 10 per row (new line after
    # each number ending in 0); numbers without a status stay uncoloured.
    outStr = ""
    for i in range(1, args.numberOfDevices + 1):
        batchNum = "%04d" % (i)
        if batchNum in output:
            outStr += output[batchNum] + " "
        else:
            outStr += batchNum + " "
        if batchNum[3] == "0":
            outStr += "\n"
    print(outStr)
|
epl-1.0
|
0436ee43d53172ed8b41cd5cf2c4b40d
| 30.938776
| 153
| 0.555911
| 3.927227
| false
| false
| false
| false
|
ibm-watson-iot/iot-python
|
src/wiotp/sdk/device/managedClient.py
|
2
|
27070
|
# *****************************************************************************
# Copyright (c) 2014, 2018 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
from datetime import datetime
import json
import logging
import threading
import pytz
import uuid
from wiotp.sdk import ConnectionException, ConfigurationException
from wiotp.sdk.device.client import DeviceClient
from wiotp.sdk.device.deviceInfo import DeviceInfo
from wiotp.sdk.device.deviceFirmware import DeviceFirmware
class ManagedDeviceClient(DeviceClient):
# Publish MQTT topics
MANAGE_TOPIC = "iotdevice-1/mgmt/manage"
UNMANAGE_TOPIC = "iotdevice-1/mgmt/unmanage"
UPDATE_LOCATION_TOPIC = "iotdevice-1/device/update/location"
ADD_ERROR_CODE_TOPIC = "iotdevice-1/add/diag/errorCodes"
CLEAR_ERROR_CODES_TOPIC = "iotdevice-1/clear/diag/errorCodes"
NOTIFY_TOPIC = "iotdevice-1/notify"
RESPONSE_TOPIC = "iotdevice-1/response"
ADD_LOG_TOPIC = "iotdevice-1/add/diag/log"
CLEAR_LOG_TOPIC = "iotdevice-1/clear/diag/log"
# Subscribe MQTT topics
DM_RESPONSE_TOPIC = "iotdm-1/response"
DM_OBSERVE_TOPIC = "iotdm-1/observe"
DM_REBOOT_TOPIC = "iotdm-1/mgmt/initiate/device/reboot"
DM_FACTORY_REESET = "iotdm-1/mgmt/initiate/device/factory_reset"
DM_UPDATE_TOPIC = "iotdm-1/device/update"
DM_CANCEL_OBSERVE_TOPIC = "iotdm-1/cancel"
DM_FIRMWARE_DOWNLOAD_TOPIC = "iotdm-1/mgmt/initiate/firmware/download"
DM_FIRMWARE_UPDATE_TOPIC = "iotdm-1/mgmt/initiate/firmware/update"
DME_ACTION_TOPIC = "iotdm-1/mgmt/custom/#"
# ResponceCode
RESPONSECODE_FUNCTION_NOT_SUPPORTED = 501
RESPONSECODE_ACCEPTED = 202
RESPONSECODE_INTERNAL_ERROR = 500
RESPONSECODE_BAD_REQUEST = 400
UPDATESTATE_IDLE = 0
UPDATESTATE_DOWNLOADING = 1
UPDATESTATE_DOWNLOADED = 2
UPDATESTATE_SUCCESS = 0
UPDATESTATE_IN_PROGRESS = 1
UPDATESTATE_OUT_OF_MEMORY = 2
UPDATESTATE_CONNECTION_LOST = 3
UPDATESTATE_VERIFICATION_FAILED = 4
UPDATESTATE_UNSUPPORTED_IMAGE = 5
UPDATESTATE_INVALID_URI = 6
def __init__(self, config, logHandlers=None, deviceInfo=None):
if config["identity"]["orgId"] == "quickstart":
raise ConfigurationException("QuickStart does not support device management")
DeviceClient.__init__(self, config, logHandlers)
# Initialize user supplied callback
self.deviceActionCallback = None
self.firmwereActionCallback = None
self.dmeActionCallback = None
messages_callbacks = (
("iotdm-1/#", self.__onDeviceMgmtResponse),
(ManagedDeviceClient.DM_REBOOT_TOPIC, self.__onRebootRequest),
(ManagedDeviceClient.DM_FACTORY_REESET, self.__onFactoryResetRequest),
(ManagedDeviceClient.DM_FIRMWARE_UPDATE_TOPIC, self.__onFirmwereUpdate),
(ManagedDeviceClient.DM_OBSERVE_TOPIC, self.__onFirmwereObserve),
(ManagedDeviceClient.DM_FIRMWARE_DOWNLOAD_TOPIC, self.__onFirmwereDownload),
(ManagedDeviceClient.DM_UPDATE_TOPIC, self.__onUpdatedDevice),
(ManagedDeviceClient.DM_CANCEL_OBSERVE_TOPIC, self.__onFirmwereCancel),
(ManagedDeviceClient.DME_ACTION_TOPIC, self.__onDMEActionRequest),
)
# Add handler for supported device management commands
for message, callback in messages_callbacks:
self.client.message_callback_add(message, callback)
# Initialize user supplied callback
self.client.on_subscribe = self._onSubscribe
self.client.on_disconnect = self._onDisconnect
self.readyForDeviceMgmt = threading.Event()
# List of DM requests that have not received a response yet
self._deviceMgmtRequestsPendingLock = threading.Lock()
self._deviceMgmtRequestsPending = {}
# List of DM notify hook
self._deviceMgmtObservationsLock = threading.Lock()
self._deviceMgmtObservations = []
# Initialize local device data model
self.metadata = {}
if deviceInfo is not None:
self._deviceInfo = deviceInfo
else:
self._deviceInfo = DeviceInfo()
self._location = None
self._errorCode = None
self.__firmwareUpdate = None
self.manageTimer = None
# Register startup subscription list
self._subscriptions[self.DM_RESPONSE_TOPIC] = 1
self._subscriptions[self.DM_OBSERVE_TOPIC] = 1
self._subscriptions[self.DM_REBOOT_TOPIC] = 1
self._subscriptions[self.DM_FACTORY_REESET] = 1
self._subscriptions[self.DM_UPDATE_TOPIC] = 1
self._subscriptions[self.DM_FIRMWARE_UPDATE_TOPIC] = 1
self._subscriptions[self.DM_FIRMWARE_DOWNLOAD_TOPIC] = 1
self._subscriptions[self.DM_CANCEL_OBSERVE_TOPIC] = 1
self._subscriptions[self._COMMAND_TOPIC] = 1
self._subscriptions[self.DME_ACTION_TOPIC] = 1
def setProperty(self, name, value):
if name not in [
"serialNumber",
"manufacturer",
"model",
"deviceClass",
"description",
"fwVersion",
"hwVersion",
"descriptiveLocation",
]:
raise Exception("Unsupported property name: %s" % name)
self._deviceInfo[name] = value
return self.notifyFieldChange("deviceInfo.%s" % name, value)
def notifyFieldChange(self, field, value):
with self._deviceMgmtObservationsLock:
if field in self._deviceMgmtObservations:
if not self.readyForDeviceMgmt.wait(timeout=10):
self.logger.warning(
"Unable to notify service of field "
"change because device is not ready "
"for device management"
)
return threading.Event().set()
reqId = str(uuid.uuid4())
message = {"d": {"field": field, "value": value}, "reqId": reqId}
resolvedEvent = threading.Event()
self.client.publish(ManagedDeviceClient.NOTIFY_TOPIC, payload=json.dumps(message), qos=1, retain=False)
with self._deviceMgmtRequestsPendingLock:
self._deviceMgmtRequestsPending[reqId] = {
"topic": ManagedDeviceClient.NOTIFY_TOPIC,
"message": message,
"event": resolvedEvent,
}
return resolvedEvent
else:
return threading.Event().set()
def _onSubscribe(self, mqttc, userdata, mid, granted_qos):
super(ManagedDeviceClient, self)._onSubscribe(mqttc, userdata, mid, granted_qos)
# Once IoTF acknowledges the subscriptions we are able to process commands and responses from device management server
self.manage()
def manage(
self,
lifetime=3600,
supportDeviceActions=True,
supportFirmwareActions=True,
supportDeviceMgmtExtActions=False,
bundleIds=[],
):
# TODO: throw an error, minimum lifetime this client will support is 1 hour, but for now set lifetime to infinite if it's invalid
if lifetime < 3600:
lifetime = 0
if not self.subscriptionsAcknowledged.wait(timeout=10):
self.logger.warning(
"Unable to send register for device " "management because device subscriptions " "are not in place"
)
return threading.Event().set()
reqId = str(uuid.uuid4())
message = {
"d": {
"lifetime": lifetime,
"supports": {"deviceActions": supportDeviceActions, "firmwareActions": supportFirmwareActions},
"deviceInfo": self._deviceInfo.__dict__,
"metadata": self.metadata,
},
"reqId": reqId,
}
if supportDeviceMgmtExtActions and len(bundleIds) > 0:
for bundleId in bundleIds:
message["d"]["supports"][bundleId] = supportDeviceMgmtExtActions
resolvedEvent = threading.Event()
self.client.publish(ManagedDeviceClient.MANAGE_TOPIC, payload=json.dumps(message), qos=1, retain=False)
with self._deviceMgmtRequestsPendingLock:
self._deviceMgmtRequestsPending[reqId] = {
"topic": ManagedDeviceClient.MANAGE_TOPIC,
"message": message,
"event": resolvedEvent,
}
# Register the future call back to Watson IoT Platform 2 minutes before the device lifetime expiry
if lifetime != 0:
if self.manageTimer is not None:
self.logger.debug("Cancelling existing manage timer")
self.manageTimer.cancel()
self.manageTimer = threading.Timer(
lifetime - 120,
self.manage,
[lifetime, supportDeviceActions, supportFirmwareActions, supportDeviceMgmtExtActions, bundleIds],
)
self.manageTimer.start()
return resolvedEvent
def unmanage(self):
    """Ask the platform to stop managing this device.

    Publishes to the unmanage topic and records the pending request so the
    response handler can resolve it; returns a threading.Event that is set
    once the platform acknowledges the request.
    """
    if not self.readyForDeviceMgmt.wait(timeout=10):
        self.logger.warning(
            "Unable to set device to unmanaged because " "device is not ready for device management"
        )
        return threading.Event().set()

    requestId = str(uuid.uuid4())
    payload = {"reqId": requestId}
    doneEvent = threading.Event()

    self.client.publish(ManagedDeviceClient.UNMANAGE_TOPIC, payload=json.dumps(payload), qos=1, retain=False)
    with self._deviceMgmtRequestsPendingLock:
        self._deviceMgmtRequestsPending[requestId] = {
            "topic": ManagedDeviceClient.UNMANAGE_TOPIC,
            "message": payload,
            "event": doneEvent,
        }
    return doneEvent
def setLocation(self, longitude, latitude, elevation=None, accuracy=None):
# TODO: Add validation (e.g. ensure numeric values)
if self._location is None:
self._location = {}
self._location["longitude"] = longitude
self._location["latitude"] = latitude
if elevation:
self._location["elevation"] = elevation
self._location["measuredDateTime"] = datetime.now(pytz.timezone("UTC")).isoformat()
if accuracy:
self._location["accuracy"] = accuracy
elif "accuracy" in self._location:
del self._location["accuracy"]
if not self.readyForDeviceMgmt.wait(timeout=10):
self.logger.warning(
"Unable to publish device location because " "device is not ready for device management"
)
return threading.Event().set()
reqId = str(uuid.uuid4())
message = {"d": self._location, "reqId": reqId}
resolvedEvent = threading.Event()
self.client.publish(ManagedDeviceClient.UPDATE_LOCATION_TOPIC, payload=json.dumps(message), qos=1, retain=False)
with self._deviceMgmtRequestsPendingLock:
self._deviceMgmtRequestsPending[reqId] = {
"topic": ManagedDeviceClient.UPDATE_LOCATION_TOPIC,
"message": message,
"event": resolvedEvent,
}
return resolvedEvent
def setErrorCode(self, errorCode=0):
    """Publish a diagnostic error code for this device.

    :param errorCode: integer code to report; ``None`` is coerced to 0.
    :return: a threading.Event resolved when the platform acknowledges the
        request, or ``None`` if the device is not ready for management.
    """
    if errorCode is None:
        errorCode = 0
    self._errorCode = errorCode
    if not self.readyForDeviceMgmt.wait(timeout=10):
        self.logger.warning("Unable to publish error code because " "device is not ready for device management")
        # NOTE(review): Event().set() returns None, so this branch actually
        # returns None rather than a (set) Event — confirm whether a pre-set
        # Event was intended (same pattern appears throughout this class).
        return threading.Event().set()
    reqId = str(uuid.uuid4())
    message = {"d": {"errorCode": errorCode}, "reqId": reqId}
    resolvedEvent = threading.Event()
    self.client.publish(ManagedDeviceClient.ADD_ERROR_CODE_TOPIC, payload=json.dumps(message), qos=1, retain=False)
    # Record the pending request so __onDeviceMgmtResponse can resolve it.
    with self._deviceMgmtRequestsPendingLock:
        self._deviceMgmtRequestsPending[reqId] = {
            "topic": ManagedDeviceClient.ADD_ERROR_CODE_TOPIC,
            "message": message,
            "event": resolvedEvent,
        }
    return resolvedEvent
def clearErrorCodes(self):
self._errorCode = None
if not self.readyForDeviceMgmt.wait(timeout=10):
self.logger.warning("Unable to clear error codes because " "device is not ready for device management")
return threading.Event().set()
reqId = str(uuid.uuid4())
message = {"reqId": reqId}
resolvedEvent = threading.Event()
self.client.publish(
ManagedDeviceClient.CLEAR_ERROR_CODES_TOPIC, payload=json.dumps(message), qos=1, retain=False
)
with self._deviceMgmtRequestsPendingLock:
self._deviceMgmtRequestsPending[reqId] = {
"topic": ManagedDeviceClient.CLEAR_ERROR_CODES_TOPIC,
"message": message,
"event": resolvedEvent,
}
return resolvedEvent
def addLog(self, msg="", data="", sensitivity=0):
timestamp = datetime.now().isoformat()
if not self.readyForDeviceMgmt.wait(timeout=10):
self.logger.warning("Unable to publish error code because " "device is not ready for device management")
return threading.Event().set()
reqId = str(uuid.uuid4())
message = {"d": {"message": msg, "timestamp": timestamp, "data": data, "severity": sensitivity}, "reqId": reqId}
resolvedEvent = threading.Event()
self.client.publish(ManagedDeviceClient.ADD_LOG_TOPIC, payload=json.dumps(message), qos=1, retain=False)
with self._deviceMgmtRequestsPendingLock:
self._deviceMgmtRequestsPending[reqId] = {
"topic": ManagedDeviceClient.ADD_LOG_TOPIC,
"message": message,
"event": resolvedEvent,
}
return resolvedEvent
def clearLog(self):
if not self.readyForDeviceMgmt.wait(timeout=10):
self.logger.warning("Unable to clear log because device is not ready for device management")
return threading.Event().set()
reqId = str(uuid.uuid4())
message = {"reqId": reqId}
resolvedEvent = threading.Event()
self.client.publish(ManagedDeviceClient.CLEAR_LOG_TOPIC, payload=json.dumps(message), qos=1, retain=False)
with self._deviceMgmtRequestsPendingLock:
self._deviceMgmtRequestsPending[reqId] = {
"topic": ManagedDeviceClient.CLEAR_LOG_TOPIC,
"message": message,
"event": resolvedEvent,
}
return resolvedEvent
def __onDeviceMgmtResponse(self, client, userdata, pahoMessage):
try:
data = json.loads(pahoMessage.payload.decode("utf-8"))
if "rc" not in data:
return True
rc = data["rc"]
reqId = data["reqId"]
except ValueError as e:
raise Exception('Unable to parse JSON. payload="%s" error=%s' % (pahoMessage.payload, str(e)))
else:
request = None
with self._deviceMgmtRequestsPendingLock:
try:
request = self._deviceMgmtRequestsPending.pop(reqId)
except KeyError:
self.logger.warning("Received unexpected response from " "device management: %s", reqId)
else:
self.logger.debug(
"Remaining unprocessed device " "management requests: %s", len(self._deviceMgmtRequestsPending)
)
if request is None:
return False
state = {
ManagedDeviceClient.MANAGE_TOPIC: {
# rc, json.dumps(request['message'])
"msg_succ": "[%s] Manage action completed: %s",
"msg_fail": "[%s] Manage action failed: %s",
},
ManagedDeviceClient.UNMANAGE_TOPIC: {
"msg_succ": "[%s] Unmanage action completed: %s",
"msg_fail": "[%s] Unmanage action failed: %s",
},
ManagedDeviceClient.UPDATE_LOCATION_TOPIC: {
"msg_succ": "[%s] Location update action completed: %s",
"msg_fail": "[%s] Location update action failed: %s",
},
ManagedDeviceClient.ADD_ERROR_CODE_TOPIC: {
"msg_succ": "[%s] Add error code action completed: %s",
"msg_fail": "[%s] Add error code action failed: %s",
},
ManagedDeviceClient.CLEAR_ERROR_CODES_TOPIC: {
"msg_succ": "[%s] Clear error codes action completed: %s",
"msg_fail": "[%s] Clear error codes action failed: %s",
},
ManagedDeviceClient.ADD_LOG_TOPIC: {
"msg_succ": "[%s] Add log action completed: %s",
"msg_fail": "[%s] Add log action failed: %s",
},
ManagedDeviceClient.CLEAR_LOG_TOPIC: {
"msg_succ": "[%s] Clear log action completed: %s",
"msg_fail": "[%s] Clear log action failed: %s",
},
}
try:
msg_succ = state[request["topic"]]["msg_succ"]
msg_fail = state[request["topic"]]["msg_fail"]
except Exception as e:
self.logger.warning("[%s] Unknown action response: %s", rc, json.dumps(request["message"]))
else:
dump_str = json.dumps(request["message"])
if rc == 200:
self.logger.info(msg_succ, rc, dump_str)
else:
self.logger.critical(msg_fail, rc, dump_str)
if request["topic"] == ManagedDeviceClient.MANAGE_TOPIC:
self.readyForDeviceMgmt.set()
elif request["topic"] == ManagedDeviceClient.UNMANAGE_TOPIC:
self.readyForDeviceMgmt.clear()
# Now clear the event, allowing anyone that was waiting on this to proceed
request["event"].set()
return True
# Device Action Handlers
def __onRebootRequest(self, client, userdata, pahoMessage):
paho_payload = pahoMessage.payload.decode("utf-8")
self.logger.info(
"Message received on topic :%s with payload %s", ManagedDeviceClient.DM_REBOOT_TOPIC, paho_payload
)
try:
data = json.loads(paho_payload)
reqId = data["reqId"]
if self.deviceActionCallback:
self.deviceActionCallback(reqId, "reboot")
except ValueError as e:
raise Exception('Unable to process Reboot request. payload="%s" error=%s' % (pahoMessage.payload, str(e)))
def __onFactoryResetRequest(self, client, userdata, pahoMessage):
paho_payload = pahoMessage.payload.decode("utf-8")
self.logger.info(
"Message received on topic :%s with payload %s", ManagedDeviceClient.DM_FACTORY_REESET, paho_payload
)
try:
data = json.loads(paho_payload)
reqId = data["reqId"]
if self.deviceActionCallback:
self.deviceActionCallback(reqId, "reset")
except ValueError as e:
raise Exception(
'Unable to process Factory Reset request. payload="%s" error=%s' % (pahoMessage.payload, str(e))
)
def respondDeviceAction(self, reqId, responseCode=202, message=""):
    """Publish the response to a device-action request (reboot/reset/etc.).

    :param reqId: request id the response corresponds to.
    :param responseCode: device-management response code (202 = accepted).
    :param message: optional human-readable detail.
    """
    response = {"rc": responseCode, "message": message, "reqId": reqId}
    payload = json.dumps(response)
    self.logger.info("Publishing Device Action response with payload :%s", payload)
    # CONSISTENCY: use the class constant instead of duplicating the
    # "iotdevice-1/response" literal (same value as RESPONSE_TOPIC).
    self.client.publish(ManagedDeviceClient.RESPONSE_TOPIC, payload, qos=1, retain=False)
# Firmware Handlers
def __onFirmwereDownload(self, client, userdata, pahoMessage):
paho_payload = pahoMessage.payload.decode("utf-8")
self.logger.info(
"Message received on topic :%s with payload %s",
ManagedDeviceClient.DM_FIRMWARE_DOWNLOAD_TOPIC,
paho_payload,
)
data = json.loads(paho_payload)
reqId = data["reqId"]
rc = ManagedDeviceClient.RESPONSECODE_ACCEPTED
msg = ""
if self.__firmwareUpdate.state != ManagedDeviceClient.UPDATESTATE_IDLE:
rc = ManagedDeviceClient.RESPONSECODE_BAD_REQUEST
msg = "Cannot download as the device is not in idle state"
thread = threading.Thread(target=self.respondDeviceAction, args=(reqId, rc, msg), name="respondDeviceAction")
thread.start()
if self.firmwereActionCallback:
self.firmwereActionCallback("download", self.__firmwareUpdate)
def __onFirmwereCancel(self, client, userdata, pahoMessage):
paho_payload = pahoMessage.payload.decode("utf-8")
self.logger.info(
"Message received on topic :%s with payload %s", ManagedDeviceClient.DM_CANCEL_OBSERVE_TOPIC, paho_payload
)
data = json.loads(paho_payload)
reqId = data["reqId"]
thread = threading.Thread(target=self.respondDeviceAction, args=(reqId, 200, ""), name="respondDeviceAction")
thread.start()
def __onFirmwereObserve(self, client, userdata, pahoMessage):
paho_payload = pahoMessage.payload.decode("utf-8")
self.logger.info(
"Message received on topic :%s with payload %s", ManagedDeviceClient.DM_OBSERVE_TOPIC, paho_payload
)
data = json.loads(paho_payload)
reqId = data["reqId"]
# TODO: Proprer validation for fields in payload
thread = threading.Thread(target=self.respondDeviceAction, args=(reqId, 200, ""), name="respondDeviceAction")
thread.start()
def __onUpdatedDevice(self, client, userdata, pahoMessage):
paho_payload = pahoMessage.payload.decode("utf-8")
self.logger.info(
"Message received on topic :%s with payload %s", ManagedDeviceClient.DM_UPDATE_TOPIC, paho_payload
)
data = json.loads(paho_payload)
if "reqId" in data:
reqId = data["reqId"]
d = data["d"]
value = None
for obj in d["fields"]:
if "field" in obj:
if obj["field"] == "mgmt.firmware":
value = obj["value"]
if value is not None:
self.__firmwareUpdate = DeviceFirmware(
value["version"],
value["name"],
value["uri"],
value["verifier"],
value["state"],
value["updateStatus"],
value["updatedDateTime"],
)
thread = threading.Thread(
target=self.respondDeviceAction, args=(reqId, 204, ""), name="respondDeviceAction"
)
thread.start()
else:
d = data["d"]
value = None
for obj in d["fields"]:
if "field" in obj:
if obj["field"] == "metadata":
value = obj["value"]
if value is not None:
self.metadata = value
def setState(self, status):
    """Notify the platform of a firmware state change.

    Updates the local firmware object (when present) and publishes the
    mgmt.firmware state field asynchronously on a worker thread.

    :param status: one of the UPDATESTATE_* constants.
    """
    notify = {"d": {"fields": [{"field": "mgmt.firmware", "value": {"state": status}}]}}
    if self.__firmwareUpdate is not None:
        self.__firmwareUpdate.state = status
    self.logger.info("Publishing state Update with payload :%s", json.dumps(notify))
    # CONSISTENCY: use the class constant instead of duplicating the
    # "iotdevice-1/notify" literal (same value as NOTIFY_TOPIC).
    thread = threading.Thread(
        target=self.client.publish,
        args=(ManagedDeviceClient.NOTIFY_TOPIC, json.dumps(notify), 1, False),
        name="client.publish",
    )
    thread.start()
def setUpdateStatus(self, status):
notify = {
"d": {
"fields": [
{
"field": "mgmt.firmware",
"value": {"state": ManagedDeviceClient.UPDATESTATE_IDLE, "updateStatus": status},
}
]
}
}
if self.__firmwareUpdate is not None:
self.__firmwareUpdate.state = ManagedDeviceClient.UPDATESTATE_IDLE
self.__firmwareUpdate.updateStatus = status
self.logger.info("Publishing Update Status with payload :%s", json.dumps(notify))
thread = threading.Thread(
target=self.client.publish, args=("iotdevice-1/notify", json.dumps(notify), 1, False), name="client.publish"
)
thread.start()
def __onFirmwereUpdate(self, client, userdata, pahoMessage):
    """Handle a firmware-update request from the platform.

    Accepts the request only if the firmware image has already been
    downloaded; otherwise responds 400-style with an explanatory message.
    The registered firmware action callback (if any) is then invoked with
    the cached DeviceFirmware descriptor.
    NOTE: "Firmwere" is a long-standing spelling kept because the name is
    referenced elsewhere (e.g. firmwereActionCallback).
    """
    paho_payload = pahoMessage.payload.decode("utf-8")
    self.logger.info(
        "Message received on topic :%s with payload %s", ManagedDeviceClient.DM_FIRMWARE_UPDATE_TOPIC, paho_payload
    )
    data = json.loads(paho_payload)
    reqId = data["reqId"]
    # Default to "accepted"; downgrade if the image isn't downloaded yet.
    rc = ManagedDeviceClient.RESPONSECODE_ACCEPTED
    msg = ""
    if self.__firmwareUpdate.state != ManagedDeviceClient.UPDATESTATE_DOWNLOADED:
        rc = ManagedDeviceClient.RESPONSECODE_BAD_REQUEST
        msg = "Firmware is still not successfully downloaded."
    # Respond off-thread so the MQTT loop is not blocked.
    thread = threading.Thread(target=self.respondDeviceAction, args=(reqId, rc, msg), name="respondDeviceAction")
    thread.start()
    if self.firmwereActionCallback:
        self.firmwereActionCallback("update", self.__firmwareUpdate)
def __onDMEActionRequest(self, client, userdata, pahoMessage):
    """Dispatch a device-management-extension (DME) action request.

    Delegates to the registered dmeActionCallback and maps its outcome
    to a response code: 200 on success, 500 on failure, 501 when no
    callback is registered. The response is sent from a worker thread.
    """
    data = json.loads(pahoMessage.payload.decode("utf-8"))
    self.logger.info("Message received on topic :%s with payload %s", ManagedDeviceClient.DME_ACTION_TOPIC, data)
    reqId = data["reqId"]
    if not self.dmeActionCallback:
        rc, msg = 501, "Operation not implemented"
    elif self.dmeActionCallback(pahoMessage.topic, data, reqId):
        rc, msg = 200, "DME Action successfully completed from Callback"
    else:
        rc, msg = 500, "Unexpected device error"
    responder = threading.Thread(
        target=self.respondDeviceAction, args=(reqId, rc, msg), name="respondDeviceAction"
    )
    responder.start()
|
epl-1.0
|
145ea361c0c78a021c0675ca9b269ad6
| 40.774691
| 137
| 0.590137
| 4.221772
| false
| false
| false
| false
|
ibm-watson-iot/iot-python
|
src/wiotp/sdk/gateway/messages.py
|
2
|
2136
|
# *****************************************************************************
# Copyright (c) 2019 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
import re
from wiotp.sdk import MissingMessageDecoderException, InvalidEventException
COMMAND_RE = re.compile("iot-2/type/(.+)/id/(.+)/cmd/(.+)/fmt/(.+)")
class Command:
    """A command received by a gateway on behalf of a device.

    Parses the topic into typeId/deviceId/commandId/format and decodes
    the payload with the encoder module registered for that format.
    """

    def __init__(self, pahoMessage, messageEncoderModules):
        match = COMMAND_RE.match(pahoMessage.topic)
        if not match:
            raise InvalidEventException("Received command on invalid topic: %s" % (pahoMessage.topic))
        self.typeId, self.deviceId, self.commandId, self.format = match.groups()
        if self.format not in messageEncoderModules:
            raise MissingMessageDecoderException(self.format)
        message = messageEncoderModules[self.format].decode(pahoMessage)
        self.timestamp = message.timestamp
        self.data = message.data
NOTIFY_RE = re.compile("iot-2/type/(.+)/id/(.+)/notify")
class Notification:
    """A notification received from a device attached to a gateway.

    The topic carries only typeId/deviceId; the payload format is
    fixed to "json" by the notify protocol.
    """

    def __init__(self, pahoMessage, messageEncoderModules):
        match = NOTIFY_RE.match(pahoMessage.topic)
        if not match:
            raise InvalidEventException("Received notification on invalid topic: %s" % (pahoMessage.topic))
        self.typeId = match.group(1)
        self.deviceId = match.group(2)
        self.format = "json"
        if self.format not in messageEncoderModules:
            raise MissingMessageDecoderException(self.format)
        message = messageEncoderModules[self.format].decode(pahoMessage)
        self.timestamp = message.timestamp
        self.data = message.data
|
epl-1.0
|
0945e4c78d6be15cc3d53a501d7b2024
| 39.301887
| 107
| 0.594569
| 4.377049
| false
| false
| false
| false
|
mbj4668/pyang
|
pyang/transforms/edit.py
|
1
|
12718
|
"""Edit transform plugin
This plugin currently has quite limited functionality. Only some specific
top-level items can be edited, and only existing statements are edited.
"""
import copy
import optparse
import re
import sys
from pyang import error
from pyang import plugin
from pyang import statements
plugin_name = 'edit'
# noinspection PyUnusedLocal
def check_date(option, opt, value):
    """optparse type-checker for the custom 'date' option type.

    Returns *value* unchanged when it matches yyyy-mm-dd; raises
    optparse.OptionValueError otherwise. (Calendar validity is not
    checked here, only the lexical shape.)
    """
    if re.match(r'^\d{4}-\d{2}-\d{2}$', value) is None:
        raise optparse.OptionValueError(
            'option %s: invalid yyyy-mm-dd date: %s' % (opt, value))
    return value
class EditOption(optparse.Option):
    """optparse Option subclass adding a validated 'date' option type."""
    # Extend the built-in option types with 'date' (yyyy-mm-dd), checked
    # by check_date() above; copy the checker map so the base class's
    # table is not mutated.
    TYPES = optparse.Option.TYPES + ('date',)
    TYPE_CHECKER = copy.copy(optparse.Option.TYPE_CHECKER)
    TYPE_CHECKER['date'] = check_date
def pyang_plugin_init():
    """Entry point called by pyang to register this transform plugin."""
    plugin.register_plugin(EditPlugin())
class EditPlugin(plugin.PyangPlugin):
    """pyang plugin implementing the 'edit' transform.

    Registers the --edit-* command-line options and applies the
    corresponding edits (see edit_tree) to each module.
    """

    def add_opts(self, optparser):
        """Register this transform's command-line options with pyang."""
        optlist = [
            # set YANG version (this does nothing if there's no yang-version
            # statement)
            EditOption("--edit-yang-version", dest="edit_yang_version",
                       metavar="VERSION",
                       help="Set YANG version to the supplied value"),
            # set namespace (this does nothing if there's no namespace
            # statement)
            EditOption("--edit-namespace", dest="edit_namespace",
                       metavar="NAMESPACE",
                       help="Set YANG namespace to the supplied value"),
            # set imported/included module/submodule revision dates
            EditOption("--edit-update-import-dates",
                       dest="edit_update_import_dates", default=False,
                       action="store_true",
                       help="Set import/include revision-date "
                            "statements to match imported/included "
                            "modules/submodules"),
            EditOption("--edit-delete-import-dates",
                       dest="edit_delete_import_dates", default=False,
                       action="store_true",
                       help="Delete import/include revision-date "
                            "statements"),
            # set meta info (these do nothing if there's no corresponding
            # metadata statement)
            EditOption("--edit-organization", dest="edit_organization",
                       metavar="ORGANIZATION",
                       help="Set module/submodule organization "
                            "to the supplied value"),
            EditOption("--edit-contact", dest="edit_contact",
                       metavar="CONTACT", help="Set module/submodule contact "
                                               "to the supplied value"),
            EditOption("--edit-description", dest="edit_description",
                       metavar="DESCRIPTION",
                       help="Set module/submodule description "
                            "to the supplied value"),
            # set revision info (these do nothing if there's no revision
            # statement)
            EditOption("--edit-delete-revisions-after",
                       dest="edit_delete_revisions_after", type="date",
                       metavar="PREVDATE",
                       help="Delete any revisions after "
                            "the supplied yyyy-mm-dd"),
            EditOption("--edit-revision-date", dest="edit_revision_date",
                       type="date", metavar="DATE",
                       help="Set most recent revision date "
                            "to the supplied yyyy-mm-dd"),
            EditOption("--edit-revision-description",
                       dest="edit_revision_description", metavar="DESCRIPTION",
                       help="Set most recent revision description "
                            "to the supplied value"),
            EditOption("--edit-revision-reference",
                       dest="edit_revision_reference", metavar="REFERENCE",
                       help="Set most recent revision reference "
                            "to the supplied value")
        ]
        g = optparser.add_option_group("Edit transform specific options")
        g.add_options(optlist)

    def add_transform(self, xforms):
        """Advertise this transform under its plugin name ('edit')."""
        xforms[plugin_name] = self

    def transform(self, ctx, modules):
        """Apply the requested edits in place to each module."""
        edit_tree(ctx, modules)
def edit_tree(ctx, modules):
    """Apply the --edit-* options to each module in *modules*, in place.

    Walks each module's top-level substatements once, editing or
    replacing them as requested, then reassigns module.substmts.
    """
    def optval(key):
        # Map e.g. 'yang-version' to ctx.opts.edit_yang_version.
        dest = ('%s-%s' % (plugin_name, key)).replace('-', '_')
        return getattr(ctx.opts, dest, None)
    for module in modules:
        for keyword in ['yang-version', 'namespace']:
            arg = optval(keyword)
            if arg is not None:
                update_or_add_stmt(module, keyword, arg)
        substmts = []
        # Only the first 'revision' statement run is rewritten; once
        # set_revision_details reports done, later revisions pass through.
        revision_done = False
        for stmt in module.substmts:
            # replstmts None means "keep stmt as-is"; a list replaces it.
            replstmts = None
            if stmt.keyword in ['import', 'include']:
                # XXX should check that these options aren't both set
                if ctx.opts.edit_update_import_dates:
                    update_import_date(ctx, stmt)
                elif ctx.opts.edit_delete_import_dates:
                    delete_import_date(ctx, stmt)
            elif stmt.keyword in ['organization', 'contact', 'description']:
                arg = optval(stmt.keyword)
                if arg is not None:
                    set_meta_details(ctx, stmt, arg)
            elif stmt.keyword == 'revision' and not revision_done:
                allrevs = module.search('revision')
                lastrev = stmt == allrevs[-1]
                replstmts, revision_done = set_revision_details(ctx, stmt,
                                                                lastrev)
            substmts += [stmt] if replstmts is None else replstmts
        # XXX should we tidy up any of the deleted statements?
        module.substmts = substmts
def update_import_date(ctx, stmt):
    """Update an import/include's revision-date to the latest revision of
    the imported/included (sub)module.

    The revision-date substatement is added or updated only when the
    referenced module was found and carries a newer (or the only known)
    revision.

    Fix: previously, when the referenced module could not be loaded (or
    had no revision statement) while the import carried a revision-date,
    ``impmodrevdate > imprevdate`` compared None against a str and raised
    TypeError on Python 3. Such imports are now left untouched.
    """
    imprev = stmt.search_one('revision-date')
    imprevdate = imprev.arg if imprev else None
    impmod = ctx.get_module(stmt.arg, imprevdate)
    impmodrev = impmod.search_one('revision') if impmod else None
    impmodrevdate = impmodrev.arg if impmodrev else None
    if impmodrevdate is None:
        # Referenced module not found or has no revision: nothing to do.
        # (Matches old behavior when no revision-date was present, and
        # replaces a TypeError crash when one was.)
        return
    if not imprev or impmodrevdate > imprevdate:
        update_or_add_stmt(stmt, 'revision-date', impmodrevdate)
# noinspection PyUnusedLocal
def delete_import_date(ctx, stmt):
    """Delete the revision-date substatement of an import/include, if any."""
    rev_stmt = stmt.search_one('revision-date')
    if rev_stmt:
        delete_stmt(stmt, rev_stmt)
# noinspection PyUnusedLocal
def set_meta_details(ctx, stmt, arg):
    """Replace a metadata statement's argument with the expanded *arg*.

    The option value is expanded via get_arg_value (supports %SUMMARY,
    %SUBST/old/new, %DELETE and @file specs); a None expansion leaves
    the statement unchanged.
    """
    new_value, _replace = get_arg_value(arg, stmt.arg)
    if new_value is not None:
        stmt.arg = new_value
# XXX note that this logic relies on there already being at least one
# revision statement; --lint checks this so it should be OK
def set_revision_details(ctx, stmt, lastrev):
    """Delete/replace/insert revision statements per the --edit options.

    *stmt* is one existing 'revision' statement; *lastrev* tells whether
    it is the final one in the module. Returns (replstmts, revision_done)
    where replstmts is None (keep stmt), [] (delete it) or a replacement
    list, and revision_done signals the caller to stop editing revisions.
    """
    revision_done = False
    # relevant options
    opts = {
        'olddate': ctx.opts.edit_delete_revisions_after,
        'newdate': ctx.opts.edit_revision_date,
        'description': ctx.opts.edit_revision_description,
        'reference': ctx.opts.edit_revision_reference
    }
    # the logic is quite tricky; here's what we want to achieve:
    # * 'olddate' is the date of the oldest revision to be retained; if not
    #   supplied, any existing revisions are deleted
    # * if 'newdate' is supplied, it's the date of the next published
    #   revision and is to be inserted at the start of any remaining
    #   revisions
    # * reuse rather than delete the oldest revision statement, purely in
    #   order to retain any blank lines after it
    # default action is to do nothing
    action = ''
    #sys.stderr.write('revision %s (lastrev %s)\n' % (stmt.arg, lastrev))
    # only adjust revisions if either olddate or newdate is supplied
    olddate = opts.get('olddate', None)
    newdate = opts.get('newdate', None)
    if olddate is not None or newdate is not None:
        # determine whether to delete this old revision
        if olddate is None or stmt.arg > olddate:
            action = 'delete'
            #sys.stderr.write('-> delete (olddate %s)\n' % olddate)
        # determine whether to insert the new revision
        if newdate is not None and (action != 'delete' or lastrev):
            action = 'replace' if action == 'delete' else 'insert'
            #sys.stderr.write('-> %s (newdate %s)\n' % (action, newdate))
    # if deleting, return an empty list
    replstmts = None
    if action == 'delete':
        replstmts = []
    # replace and insert logic is quite similar:
    # * if replacing, modify this statement and return a list containing
    #   only it
    # * if inserting, create a new statement and return a list containing
    #   the new and the original statement
    elif action == 'replace' or action == 'insert':
        if action == 'replace':
            revstmt = stmt
            revstmt.arg = newdate
        else:
            revstmt = statements.new_statement(stmt.top, stmt.parent, None,
                                               'revision', newdate)
        # copy description/reference (everything except the two dates)
        other_keywords = set(opts.keys()) - {'olddate', 'newdate'}
        for keyword in other_keywords:
            update_or_add_stmt(revstmt, keyword, opts[keyword])
        if action == 'replace':
            replstmts = [revstmt]
        else:
            replstmts = [revstmt, stmt]
        revision_done = True
    #sys.stderr.write(
    #    '= %s\n' % ([s.arg for s in replstmts] if replstmts else None))
    return replstmts, revision_done
def get_arg_value(arg, currarg=None):
    """Expand an --edit option value specification.

    *arg* may be plain text, or (when starting with '%' or '@') a
    '+'-joined list of specs, each of which is either literal text,
    '%SUMMARY' (summary of *currarg*), '%SUBST/old/new' (substitution in
    *currarg*), '%DELETE', or '@filename' (file contents). Returns
    (value, replace): value None means "leave unchanged / delete";
    replace True means the caller may overwrite an existing argument.
    Raises error.EmitError if an @file cannot be read.
    """
    if arg is None or arg[0] not in ['%', '@']:
        # plain value: use as-is, but don't clobber existing text
        return arg, True
    else:
        replace = False
        try:
            argval = ''
            specs = arg.split('+')
            for spec in specs:
                if argval != '':
                    # blank line between concatenated specs
                    argval += '\n\n'
                if spec[0] not in ['%', '@']:
                    argval += spec
                elif spec[0] == '%':
                    if spec == '%SUMMARY':
                        summary = get_arg_summary(currarg)
                        if summary:
                            argval += summary
                    elif spec.startswith('%SUBST/'):
                        (ignore, old, new) = spec.split('/')
                        if currarg is None:
                            # nothing to substitute in; if nothing else
                            # accumulated, report "no value"
                            if argval == '':
                                argval = None
                        else:
                            argval = currarg.replace(old, new)
                            replace = True
                    elif spec == '%DELETE':
                        argval = ''
                        replace = True
                    else:
                        # unknown %-spec: treat as literal text
                        argval += spec
                elif spec[0] == '@':
                    argval += open(spec[1:], 'r').read().rstrip()
            return argval, replace
        except IOError as e:
            raise error.EmitError(str(e))
def get_arg_summary(arg):
    """Summarize *arg*: its lines up to (excluding) the first line that
    starts with 'Copyright ', joined with newlines.

    Leading empty lines are skipped, and a blank line immediately before
    the copyright notice is dropped. Returns 'TBD' when nothing remains.
    """
    kept = []
    for line in arg.splitlines():
        if line.strip().startswith('Copyright '):
            # drop the blank separator directly before the notice
            if kept and kept[-1].strip() == '':
                kept.pop()
            break
        if kept or line != '':
            kept.append(line)
    summary = '\n'.join(kept)
    return summary if summary else 'TBD'
# XXX should insert in canonical order; currently (apart from the hack noted
# below) just appending; should look into doing the same as yang.py, which
# does: substmts = grammar.sort_canonical(stmt.keyword, stmt.substmts)
def update_or_add_stmt(stmt, keyword, arg, index=None):
    """Set *stmt*'s *keyword* substatement argument to the expanded *arg*,
    creating the substatement if necessary.

    An existing non-trivial argument is only overwritten when the
    expansion says so (replace) or the current value is empty/'TBD';
    otherwise a warning goes to stderr. Returns the affected substatement
    or None when the expansion produced no value.
    """
    child = stmt.search_one(keyword)
    currarg = child.arg if child else None
    (argval, replace) = get_arg_value(arg, currarg)
    if argval is None:
        # no value to set: report "nothing done"
        child = None
    elif child:
        if not replace and child.arg and child.arg != argval and child.arg \
                != 'TBD':
            # refuse to silently clobber meaningful existing text
            sys.stderr.write('%s: not replacing existing %s %r with %r\n' % (
                child.pos, keyword, child.arg, argval))
        else:
            child.arg = argval
    else:
        child = statements.new_statement(stmt.top, stmt, None, keyword, argval)
        if index is None:
            index = len(stmt.substmts)
            # XXX this hack ensures that 'reference' is always last
            if index > 0 and stmt.substmts[index - 1].keyword == 'reference':
                index -= 1
        stmt.substmts.insert(index, child)
    return child
def delete_stmt(parent, stmt):
    """Remove *stmt* from *parent*'s substatements; no-op if absent."""
    if stmt in parent.substmts:
        parent.substmts.remove(stmt)
|
isc
|
c7fe489f489e7470546c470f3f6a8e4f
| 36.296188
| 79
| 0.551109
| 4.419041
| false
| false
| false
| false
|
mbj4668/pyang
|
pyang/syntax.py
|
1
|
15064
|
"""Description of YANG & YIN syntax."""
import os
import re
import shlex
import sys
import datetime
### Regular expressions - constraints on arguments
# keywords and identifiers
identifier = r"[_A-Za-z][._\-A-Za-z0-9]*"
prefix = identifier
keyword = '((' + prefix + '):)?(' + identifier + ')'
comment = r'(/\*([^*]|[\r\n\s]|(\*+([^*/]|[\r\n\s])))*\*+/)|(//.*)|(/\*.*)'
# no group version of keyword
keyword_ng = '(?:(' + prefix + '):)?(?:' + identifier + ')'
re_keyword = re.compile(keyword)
re_keyword_start = re.compile('^' + keyword)
re_comment = re.compile(comment)
pos_integer = r"[1-9][0-9]*"
nonneg_integer = r"(0|([1-9][0-9]*))"
integer_ = r"[+-]?" + nonneg_integer
decimal_ = integer_ + r"(\.[0-9]+)?"
length_str = r'((min|max|[0-9]+)\s*' \
r'(\.\.\s*' \
r'(min|max|[0-9]+)\s*)?)'
length_expr = length_str + r'(\|\s*' + length_str + r')*'
re_length_part = re.compile(length_str)
range_str = r'((min|max|((\+|\-)?[0-9]+(\.[0-9]+)?))\s*' \
r'(\.\.\s*' \
r'(min|max|(\+|\-)?[0-9]+(\.[0-9]+)?)\s*)?)'
range_expr = range_str + r'(\|\s*' + range_str + r')*'
re_range_part = re.compile(range_str)
re_identifier = re.compile("^" + identifier + "$")
# path and unique
node_id = keyword_ng
rel_path_keyexpr = r"(\.\./)+(" + node_id + "/)*" + node_id
path_key_expr = r"(current\s*\(\s*\)/" + rel_path_keyexpr + ")"
path_equality_expr = node_id + r"\s*=\s*" + path_key_expr
path_predicate = r"\s*\[\s*" + path_equality_expr + r"\s*\]\s*"
absolute_path_arg = "(?:/" + node_id + "(" + path_predicate + ")*)+"
descendant_path_arg = node_id + "(" + path_predicate + ")*" + \
"(?:" + absolute_path_arg + ")?"
relative_path_arg = r"(\.\./)*" + descendant_path_arg
deref_path_arg = r"deref\s*\(\s*(?:" + relative_path_arg + \
r")\s*\)/\.\./" + relative_path_arg
path_arg = "(" + absolute_path_arg + "|" + relative_path_arg + "|" + \
deref_path_arg + ")"
absolute_schema_nodeid = "(/" + node_id + ")+"
descendant_schema_nodeid = node_id + "(" + absolute_schema_nodeid + ")?"
schema_nodeid = "("+absolute_schema_nodeid+"|"+descendant_schema_nodeid+")"
unique_arg = descendant_schema_nodeid + \
r"(\s+" + descendant_schema_nodeid + r")*"
key_arg = node_id + r"(\s+" + node_id + r")*"
re_schema_node_id_part = re.compile('/' + keyword)
# URI - RFC 3986, Appendix A
scheme = "[A-Za-z][-+.A-Za-z0-9]*"
unreserved = "[-._~A-Za-z0-9]"
pct_encoded = "%[0-9A-F]{2}"
sub_delims = "[!$&'()*+,;=]"
pchar = ("(" + unreserved + "|" + pct_encoded + "|" +
sub_delims + "|[:@])")
segment = pchar + "*"
segment_nz = pchar + "+"
userinfo = ("(" + unreserved + "|" + pct_encoded + "|" +
sub_delims + "|:)*")
dec_octet = "([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])"
ipv4address = "(" + dec_octet + r"\.){3}" + dec_octet
h16 = "[0-9A-F]{1,4}"
ls32 = "(" + h16 + ":" + h16 + "|" + ipv4address + ")"
ipv6address = (
"((" + h16 + ":){6}" + ls32 +
"|::(" + h16 + ":){5}" + ls32 +
"|(" + h16 + ")?::(" + h16 + ":){4}" + ls32 +
"|((" + h16 + ":)?" + h16 + ")?::(" + h16 + ":){3}" + ls32 +
"|((" + h16 + ":){,2}" + h16 + ")?::(" + h16 + ":){2}" + ls32 +
"|((" + h16 + ":){,3}" + h16 + ")?::" + h16 + ":" + ls32 +
"|((" + h16 + ":){,4}" + h16 + ")?::" + ls32 +
"|((" + h16 + ":){,5}" + h16 + ")?::" + h16 +
"|((" + h16 + ":){,6}" + h16 + ")?::)")
ipvfuture = r"v[0-9A-F]+\.(" + unreserved + "|" + sub_delims + "|:)+"
ip_literal = r"\[(" + ipv6address + "|" + ipvfuture + r")\]"
reg_name = "(" + unreserved + "|" + pct_encoded + "|" + sub_delims + ")*"
host = "(" + ip_literal + "|" + ipv4address + "|" + reg_name + ")"
port = "[0-9]*"
authority = "(" + userinfo + "@)?" + host + "(:" + port + ")?"
path_abempty = "(/" + segment + ")*"
path_absolute = "/(" + segment_nz + "(/" + segment + ")*)?"
path_rootless = segment_nz + "(/" + segment + ")*"
path_empty = pchar + "{0}"
hier_part = ("(" + "//" + authority + path_abempty + "|" +
path_absolute + "|" + path_rootless + "|" + path_empty + ")")
query = "(" + pchar + "|[/?])*"
fragment = query
uri = (scheme + ":" + hier_part + r"(\?" + query + ")?" +
"(#" + fragment + ")?")
# Date
date = r"([1-2][0-9]{3})-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])"
re_nonneg_integer = re.compile("^" + nonneg_integer + "$")
re_integer = re.compile("^" + integer_ + "$")
re_decimal = re.compile("^" + decimal_ + "$")
re_uri = re.compile("^" + uri + "$")
re_boolean = re.compile(r"^(true|false)$")
re_version = re.compile(r"^(1|(1\.1))$")
re_date = re.compile("^" + date +"$")
re_status = re.compile(r"^(current|obsolete|deprecated)$")
re_key = re.compile("^" + key_arg + "$")
re_length = re.compile("^" + length_expr + "$")
re_range = re.compile("^" + range_expr + "$")
re_pos_integer = re.compile(r"^(unbounded|" + pos_integer + r")$")
re_ordered_by = re.compile(r"^(user|system)$")
re_modifier = re.compile(r"^(invert-match)$")
re_node_id = re.compile("^" + node_id + "$")
re_path = re.compile("^" + path_arg + "$")
re_absolute_path = re.compile("^" + absolute_path_arg + "$")
re_unique = re.compile("^" + unique_arg + "$")
re_schema_nodeid = re.compile("^" + schema_nodeid + "$")
re_absolute_schema_nodeid = re.compile("^" + absolute_schema_nodeid + "$")
re_descendant_schema_nodeid = re.compile("^" + descendant_schema_nodeid + "$")
re_deviate = re.compile(r"^(add|delete|replace|not-supported)$")
# Not part of YANG syntax per se but useful for pyang in several places
re_filename = re.compile(
r"^(?:.*" + re.escape(os.sep) + r")?" + # ignore all before os.sep
r"([^@]*?)" + # putative module name
r"(?:@([^.]*?))?" + # putative revision
r"(?:\.yang|\.yin)*" + # foo@bar.yang.yin.yang.yin ?
r"\.(yang|yin)$") # actual final extension
arg_type_map = {
"identifier": lambda s: re_identifier.search(s) is not None,
"non-negative-integer": lambda s: re_nonneg_integer.search(s) is not None,
"integer": lambda s: re_integer.search(s) is not None,
"uri": lambda s: re_uri.search(s) is not None,
"boolean": lambda s: re_boolean.search(s) is not None,
"version": lambda s: re_version.search(s) is not None,
"date": lambda s: chk_date_arg(s),
"status-arg": lambda s: re_status.search(s) is not None,
"key-arg": lambda s: re_key.search(s) is not None,
"length-arg": lambda s: re_length.search(s) is not None,
"range-arg": lambda s: re_range.search(s) is not None,
"max-value": lambda s: re_pos_integer.search(s) is not None,
"ordered-by-arg": lambda s: re_ordered_by.search(s) is not None,
"modifier-arg": lambda s: re_modifier.search(s) is not None,
"identifier-ref": lambda s: re_node_id.search(s) is not None,
"path-arg": lambda s: re_path.search(s) is not None,
"absolute-path-arg": lambda s: re_absolute_path.search(s) is not None,
"unique-arg": lambda s: re_unique.search(s) is not None,
"absolute-schema-nodeid": lambda s: \
re_absolute_schema_nodeid.search(s) is not None,
"descendant-schema-nodeid": lambda s: \
re_descendant_schema_nodeid.search(s) is not None,
"schema-nodeid": lambda s: \
re_schema_nodeid.search(s) is not None,
"enum-arg": lambda s: chk_enum_arg(s),
"fraction-digits-arg": lambda s: chk_fraction_digits_arg(s),
"if-feature-expr": lambda s: chk_if_feature_expr(s),
"deviate-arg": lambda s: re_deviate.search(s) is not None,
"_comment": lambda s: re_comment.search(s) is not None,
}
"""Argument type definitions.
Regular expressions for all argument types except plain string that
are checked directly by the parser.
"""
def chk_date_arg(s):
    """Checks if the string `s` is a valid date string.

    The string must match the yyyy-mm-dd pattern AND denote a real
    calendar date. Return True or False.
    """
    match = re_date.match(s)
    if match is None:
        return False
    year, month, day = (int(part) for part in match.groups())
    try:
        datetime.date(year, month, day)
    except ValueError:
        return False
    return True
def chk_enum_arg(s):
    """Checks if the string `s` is a valid enum string.

    Valid means non-empty with no leading or trailing whitespace.
    Return True or False.
    """
    return bool(s) and not s[0].isspace() and not s[-1].isspace()
def chk_fraction_digits_arg(s):
    """Checks if the string `s` is a valid fraction-digits argument.

    Valid means an integer in the range 1..18. Return True or False.
    """
    try:
        return 1 <= int(s) <= 18
    except ValueError:
        return False
def chk_if_feature_expr(s):
    """Return True if `s` parses as a YANG if-feature expression."""
    return parse_if_feature_expr(s) is not None
# if-feature-expr = "(" if-feature-expr ")" /
# if-feature-expr sep boolean-operator sep
# if-feature-expr /
# not-keyword sep if-feature-expr /
# identifier-ref-arg
#
# Rewrite to:
# x = y ("and"/"or" y)*
# y = "not" x /
# "(" x ")"
# identifier
#
# Expr :: ('not', Expr, None)
# | ('and'/'or', Expr, Expr)
# | Identifier
def parse_if_feature_expr(s):
    """Parse a YANG if-feature expression into an expression tree.

    Returns a nested tuple tree per the grammar comment above:
    ('not', Expr, None) | ('and'/'or', Expr, Expr) | identifier string,
    or None when *s* is not a valid expression. Uses a shunting-yard
    style parser with explicit operator/operand stacks.
    """
    try:
        # Encoding to ascii works for valid if-feature-exprs, since all
        # pars are YANG identifiers (or the boolean keywords).
        # The reason for this fix is that in Python < 2.7.3, shlex would return
        # erroneous tokens if a unicode string was passed.
        # Also, shlex uses cStringIO internally which doesn't handle unicode
        # characters outside the ascii range anyway.
        if sys.version < '3':
            sx = shlex.shlex(s.encode("ascii"))
        else:
            sx = shlex.shlex(s)
    except UnicodeEncodeError:
        return None
    sx.wordchars += ":-"  # need to handle prefixes and '-' in the name
    # operator stack; a None entry marks a parenthesis/expression boundary
    operators = [None]
    operands = []
    precedence = {'not':3, 'and':2, 'or':1, None:0}
    def x():
        # x = y ("and"/"or" y)*
        y()
        tok = sx.get_token()
        while tok in ('and', 'or'):
            push_operator(tok)
            y()
            tok = sx.get_token()
        sx.push_token(tok)
        # reduce everything down to the current boundary marker
        while operators[-1] is not None:
            pop_operator()
    def y():
        # y = "not" x / "(" x ")" / identifier
        tok = sx.get_token()
        if tok == 'not':
            push_operator(tok)
            x()
        elif tok == '(':
            operators.append(None)
            x()
            tok = sx.get_token()
            if tok != ')':
                raise ValueError
            operators.pop()
        elif is_identifier(tok):
            operands.append(tok)
        else:
            raise ValueError
    def push_operator(op):
        # reduce higher-precedence operators before pushing op
        while op_gt(operators[-1], op):
            pop_operator()
        operators.append(op)
    def pop_operator():
        op = operators.pop()
        if op == 'not':
            operands.append((op, operands.pop(), None))
        else:
            operands.append((op, operands.pop(), operands.pop()))
    def op_gt(op1, op2):
        return precedence[op1] > precedence[op2]
    def is_identifier(tok):
        return re_node_id.search(tok) is not None
    try:
        x()
        # reject trailing garbage after a complete expression
        if sx.get_token() != '':
            raise ValueError
        return operands[-1]
    except ValueError:
        return None
def add_arg_type(arg_type, regexp):
    """Add a new arg_type to the map.

    Used by extension plugins to register their own argument types.
    `regexp` is stored verbatim in arg_type_map; existing entries there
    are predicates (callables returning bool), so a checker callable is
    expected despite the parameter name.
    """
    arg_type_map[arg_type] = regexp
# keyword argument-name yin-element
yin_map = \
{'action': ('name', False),
'anydata': ('name', False),
'anyxml': ('name', False),
'argument': ('name', False),
'augment': ('target-node', False),
'base': ('name', False),
'belongs-to': ('module', False),
'bit': ('name', False),
'case': ('name', False),
'choice': ('name', False),
'config': ('value', False),
'contact': ('text', True),
'container': ('name', False),
'default': ('value', False),
'description': ('text', True),
'deviate': ('value', False),
'deviation': ('target-node', False),
'enum': ('name', False),
'error-app-tag': ('value', False),
'error-message': ('value', True),
'extension': ('name', False),
'feature': ('name', False),
'fraction-digits': ('value', False),
'grouping': ('name', False),
'identity': ('name', False),
'if-feature': ('name', False),
'import': ('module', False),
'include': ('module', False),
'input': (None, None),
'key': ('value', False),
'leaf': ('name', False),
'leaf-list': ('name', False),
'length': ('value', False),
'list': ('name', False),
'mandatory': ('value', False),
'max-elements': ('value', False),
'min-elements': ('value', False),
'modifier': ('value', False),
'module': ('name', False),
'must': ('condition', False),
'namespace': ('uri', False),
'notification': ('name', False),
'ordered-by': ('value', False),
'organization': ('text', True),
'output': (None, None),
'path': ('value', False),
'pattern': ('value', False),
'position': ('value', False),
'presence': ('value', False),
'prefix': ('value', False),
'range': ('value', False),
'reference': ('text', True),
'refine': ('target-node', False),
'require-instance': ('value', False),
'revision': ('date', False),
'revision-date': ('date', False),
'rpc': ('name', False),
'status': ('value', False),
'submodule': ('name', False),
'type': ('name', False),
'typedef': ('name', False),
'unique': ('tag', False),
'units': ('name', False),
'uses': ('name', False),
'value': ('value', False),
'when': ('condition', False),
'yang-version': ('value', False),
'yin-element': ('value', False),
}
"""Mapping of statements to the YIN representation of their arguments.
The values are pairs whose first component specifies whether the
argument is stored in a subelement and the second component is the
name of the attribute or subelement carrying the argument. See YANG
specification.
"""
|
isc
|
433a69ad9ffd4ba16f37c8f4cb504c13
| 37.925065
| 79
| 0.497411
| 3.225696
| false
| false
| false
| false
|
mbj4668/pyang
|
pyang/translators/yin.py
|
1
|
6251
|
"""YIN output plugin"""
from xml.sax.saxutils import quoteattr
from xml.sax.saxutils import escape
import optparse
import re
from .. import plugin
from .. import util
from .. import grammar
from .. import syntax
from .. import statements
yin_namespace = "urn:ietf:params:xml:ns:yang:yin:1"
def pyang_plugin_init():
    """Entry point called by pyang to register the YIN output plugin."""
    plugin.register_plugin(YINPlugin())
class YINPlugin(plugin.PyangPlugin):
    """pyang output plugin emitting modules in YIN (XML) format."""

    def add_opts(self, optparser):
        """Register the --yin-* command-line options."""
        optlist = [
            optparse.make_option("--yin-canonical",
                                 dest="yin_canonical",
                                 action="store_true",
                                 help="Print in canonical order"),
            optparse.make_option("--yin-pretty-strings",
                                 dest="yin_pretty_strings",
                                 action="store_true",
                                 help="Pretty print strings"),
            ]
        g = optparser.add_option_group("YIN output specific options")
        g.add_options(optlist)

    def add_output_format(self, fmts):
        """Advertise the 'yin' output format."""
        fmts['yin'] = self

    def emit(self, ctx, modules, fd):
        """Write the first module in *modules* to *fd* as YIN."""
        module = modules[0]
        emit_yin(ctx, module, fd)
def emit_yin(ctx, module, fd):
    """Write *module* to *fd* as a YIN document.

    Emits the XML prolog and root element with the YIN namespace plus an
    xmlns declaration for the module's own prefix and for each import,
    then recursively emits all substatements via emit_stmt.
    """
    fd.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    fd.write('<%s name="%s"\n' % (module.keyword, module.arg))
    fd.write(' ' * len(module.keyword) + ' xmlns="%s"' % yin_namespace)
    prefix = module.search_one('prefix')
    if prefix is not None:
        # module: its own prefix maps to its namespace
        namespace = module.search_one('namespace')
        fd.write('\n')
        fd.write(' ' * len(module.keyword))
        fd.write(' xmlns:' + prefix.arg + '=' +
                 quoteattr(namespace.arg))
    else:
        # submodule: namespace comes from the parent module (belongs-to)
        belongs_to = module.search_one('belongs-to')
        if belongs_to is not None:
            prefix = belongs_to.search_one('prefix')
            if prefix is not None:
                # read the parent module in order to find the namespace uri
                res = ctx.read_module(belongs_to.arg, extra={'no_include':True})
                if res is not None:
                    namespace = res.search_one('namespace')
                    if namespace is None or namespace.arg is None:
                        pass
                    else:
                        # success - namespace found
                        fd.write('\n')
                        fd.write(' ' * len(module.keyword))
                        fd.write(' xmlns:' + prefix.arg + '=' +
                                 quoteattr(namespace.arg))
    # one xmlns declaration per resolvable import
    for imp in module.search('import'):
        prefix = imp.search_one('prefix')
        if prefix is not None:
            rev = None
            r = imp.search_one('revision-date')
            if r is not None:
                rev = r.arg
            mod = statements.modulename_to_module(module, imp.arg, rev)
            if mod is not None:
                ns = mod.search_one('namespace')
                if ns is not None:
                    fd.write('\n')
                    fd.write(' ' * len(module.keyword))
                    fd.write(' xmlns:' + prefix.arg + '=' +
                             quoteattr(ns.arg))
    fd.write('>\n')
    if ctx.opts.yin_canonical:
        substmts = grammar.sort_canonical(module.keyword, module.substmts)
    else:
        substmts = module.substmts
    for s in substmts:
        emit_stmt(ctx, module, s, fd, ' ', ' ')
    fd.write('</%s>\n' % module.keyword)
def emit_stmt(ctx, module, stmt, fd, indent, indentstep):
    """Recursively write one statement (and its substatements) as YIN.

    The statement's argument is rendered either as an XML attribute or as
    a child element, per syntax.yin_map for core keywords or the
    extension's own yin-element declaration for extension keywords.
    """
    if util.is_prefixed(stmt.raw_keyword):
        # this is an extension. need to find its definition
        (prefix, identifier) = stmt.raw_keyword
        tag = prefix + ':' + identifier
        if stmt.i_extension is not None:
            ext_arg = stmt.i_extension.search_one('argument')
            if ext_arg is not None:
                yin_element = ext_arg.search_one('yin-element')
                if yin_element is not None and yin_element.arg == 'true':
                    argname = prefix + ':' + ext_arg.arg
                    argiselem = True
                else:
                    # explicit false or no yin-element given
                    argname = ext_arg.arg
                    argiselem = False
            else:
                # extension takes no argument
                argiselem = False
                argname = None
        else:
            # unresolved extension: emit with no argument
            argiselem = False
            argname = None
    else:
        (argname, argiselem) = syntax.yin_map[stmt.raw_keyword]
        tag = stmt.raw_keyword
    if argiselem is False or argname is None:
        # argument rendered as an attribute (or absent)
        if argname is None:
            attr = ''
        else:
            attr = ' ' + argname + '=' + quoteattr(stmt.arg)
        if len(stmt.substmts) == 0:
            fd.write(indent + '<' + tag + attr + '/>\n')
        else:
            fd.write(indent + '<' + tag + attr + '>\n')
            for s in stmt.substmts:
                emit_stmt(ctx, module, s, fd, indent + indentstep,
                          indentstep)
            fd.write(indent + '</' + tag + '>\n')
    else:
        # argument rendered as a child element
        fd.write(indent + '<' + tag + '>\n')
        if ctx.opts.yin_pretty_strings:
            # since whitespace is significant in XML, the current
            # code is strictly speaking incorrect. But w/o the whitespace,
            # it looks too ugly.
            fd.write(indent + indentstep + '<' + argname + '>\n')
            fd.write(fmt_text(indent + indentstep + indentstep, stmt.arg))
            fd.write('\n' + indent + indentstep + '</' + argname + '>\n')
        else:
            fd.write(indent + indentstep + '<' + argname + '>' + \
                     escape(stmt.arg) + \
                     '</' + argname + '>\n')
        if ctx.opts.yin_canonical:
            substmts = grammar.sort_canonical(stmt.keyword, stmt.substmts)
        else:
            substmts = stmt.substmts
        for s in substmts:
            emit_stmt(ctx, module, s, fd, indent + indentstep, indentstep)
        fd.write(indent + '</' + tag + '>\n')
def fmt_text(indent, data):
    """XML-escape *data* and prefix each of its lines with *indent*.

    Newlines are preserved as-is (no trailing newline is added).
    """
    pieces = []
    for part in re.split("(\n)", escape(data)):
        if part == '':
            continue
        pieces.append(part if part == '\n' else indent + part)
    return ''.join(pieces)
|
isc
|
9a2d0ef23d5688834407018ea4306364
| 37.58642
| 80
| 0.503279
| 3.976463
| false
| false
| false
| false
|
rdegges/django-twilio
|
test_project/settings.py
|
1
|
6114
|
# -*- coding: utf-8 -*-
import sys
# Django settings for test_project project.
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import django
import packaging.version
# Project root: two levels up from this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Test project only -- DEBUG must never be left enabled in production.
DEBUG = True
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Single SQLite database: adequate for the test suite, no DB server needed.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'db.sqlite3',
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
# NOTE(review): ADMIN_MEDIA_PREFIX was removed from Django around 1.4;
# presumably kept for very old versions -- harmless but likely unused.
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
# A hard-coded key is acceptable only because this is a throwaway test
# project; real deployments must load SECRET_KEY from the environment.
SECRET_KEY = 'j1wd@qqodn-r9h&o@0jj!uw^#pm5wcdu2^cdsax=hm+-mk705p'
# Django renamed the middleware setting: pre-2.0 releases honour the old
# MIDDLEWARE_CLASSES tuple, while Django 2.0+ only reads MIDDLEWARE.
# Branch on django.VERSION (a tuple such as (1, 11, 29, 'final', 0))
# instead of string-parsing django.__version__ with a third-party version
# parser: tuple comparison is the documented, dependency-free way to
# compare Django versions, and (1, 11, ...) < (2, 0) is always correct.
if django.VERSION < (2, 0):
    MIDDLEWARE_CLASSES = (
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
    )
else:
    MIDDLEWARE = [
        'django.middleware.security.SecurityMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
    ]
# Dotted path to the URLconf module for this project.
ROOT_URLCONF = 'test_project.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.admindocs',
    # django-twilio, of course!
    'django_twilio',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
# django-twilio account credentials. These fields are required to use the REST
# API (initiate outbound calls and SMS messages).
# The X/Y placeholder patterns below are obviously not real credentials.
TWILIO_ACCOUNT_SID = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
TWILIO_AUTH_TOKEN = 'YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY'
# The default callerid will be used for all outgoing phone calls and SMS
# messages if not explicitly specified. This number must be previously
# validated with twilio in order to work. See
# https://www.twilio.com/user/account/phone-numbers#
TWILIO_DEFAULT_CALLERID = 'NNNNNNNNNN'
|
unlicense
|
69e54dec409d3927e831977f003e6393
| 31.870968
| 84
| 0.697089
| 3.73945
| false
| false
| false
| false
|
mozilla-services/buildhub
|
jobs/tests/test_lambda_s3_event_functional.py
|
1
|
4418
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import os
import json
import kinto_http
from decouple import config
from buildhub import lambda_s3_event
# Directory containing this test module; used to locate the JSON fixtures.
here = os.path.dirname(__file__)
# Kinto server under test; overridable through the SERVER_URL env variable.
server = config('SERVER_URL', default='http://localhost:8888/v1')
# Kinto bucket / collection ids that the lambda writes records into.
bid = 'build-hub'
cid = 'releases'
class LambdaTest(unittest.TestCase):
    """Functional test for the S3-event lambda handler.

    Feeds a canned S3 event fixture through ``lambda_s3_event.lambda_handler``
    and then verifies, via the Kinto HTTP API, that the expected release
    records were created. Requires a reachable Kinto server (``SERVER_URL``).
    """

    def setUp(self):
        """Load the fixture S3 event into ``self.event``."""
        filename = os.path.join(here, 'data', 's3-event-simple.json')
        # Use a context manager so the file handle is closed
        # deterministically; the original `json.load(open(...))` leaked it.
        with open(filename, 'r') as f:
            self.event = json.load(f)

    def test_load_into_kinto(self):
        """Handler should create both a release and a nightly record."""
        lambda_s3_event.lambda_handler(self.event, None)
        # --- release build record ---
        rid = 'firefox_54-0_win64_fr'
        client = kinto_http.Client(server_url=server)
        record = client.get_record(bucket=bid, collection=cid, id=rid)['data']
        # last_modified is assigned by the server and non-deterministic.
        record.pop('last_modified')
        assert record == {
            'id': 'firefox_54-0_win64_fr',
            'source': {
                'repository': (
                    'https://hg.mozilla.org/releases/mozilla-release'
                ),
                'revision': 'e832ed037a3c23004be73178e546d240e57b6ee1',
                'product': 'firefox',
                'tree': 'releases/mozilla-release'
            },
            'download': {
                'mimetype': 'application/msdos-windows',
                'url': 'https://archive.mozilla.org/pub/firefox/releases/'
                       '54.0/win64/fr/Firefox Setup 54.0.exe',
                'size': 51001024,
                'date': '2017-08-08T17:06:52Z'
            },
            'target': {
                'locale': 'fr',
                'platform': 'win64',
                'os': 'win',
                'version': '54.0',
                'channel': 'release'
            },
            'build': {
                'as': 'ml64.exe',
                'cc': (
                    'c:/builds/moz2_slave/m-rel-w64-00000000000000000000/'
                    'build/src/vs2015u3/VC/bin/amd64/cl.exe'
                ),
                'cxx': (
                    'c:/builds/moz2_slave/m-rel-w64-00000000000000000000/'
                    'build/src/vs2015u3/VC/bin/amd64/cl.exe'
                ),
                'date': '2017-06-08T10:58:25Z',
                'host': 'x86_64-pc-mingw32',
                'id': '20170608105825',
                'number': 3,
                'target': 'x86_64-pc-mingw32'
            }
        }
        # --- nightly build record ---
        rid = 'firefox_nightly_2017-10-29-22-01-12_58-0a1_linux-i686_en-us'
        record = client.get_record(bucket=bid, collection=cid, id=rid)['data']
        record.pop('last_modified')
        assert record == {
            'build': {
                'as': '$(CC)',
                'cc': (
                    '/usr/bin/ccache '
                    '/builds/worker/workspace/build/src/gcc/bin/gcc -m32 '
                    '-march=pentium-m -std=gnu99'
                ),
                'cxx': (
                    '/usr/bin/ccache '
                    '/builds/worker/workspace/build/src/gcc/bin/g++ -m32 '
                    '-march=pentium-m -std=gnu++11'
                ),
                'date': '2017-10-29T22:01:12Z',
                'host': 'i686-pc-linux-gnu',
                'id': '20171029220112',
                'target': 'i686-pc-linux-gnu',
            },
            'download': {
                'date': '2017-10-29T17:06:52Z',
                'mimetype': 'application/x-bzip2',
                'size': 51001024,
                'url': (
                    'https://archive.mozilla.org/pub/firefox/nightly/2017/10/'
                    '2017-10-29-22-01-12-mozilla-central/firefox-58.0a1.'
                    'en-US.linux-i686.tar.bz2'
                )
            },
            'id': (
                'firefox_nightly_2017-10-29-22-01-12_58-0a1_linux-i686_en-us'
            ),
            'source': {
                'product': 'firefox',
                'repository': 'https://hg.mozilla.org/mozilla-central',
                'revision': 'd3910b7628b8066d3f30d58b17b5824b05768854',
                'tree': 'mozilla-central'
            },
            'target': {
                'channel': 'nightly',
                'locale': 'en-US',
                'os': 'linux',
                'platform': 'linux-i686',
                'version': '58.0a1'
            }
        }
|
mpl-2.0
|
1517965c5c64a352e8b73259eff50045
| 34.629032
| 78
| 0.463105
| 3.571544
| false
| false
| false
| false
|
pikepdf/pikepdf
|
src/pikepdf/models/encryption.py
|
1
|
5651
|
# SPDX-FileCopyrightText: 2022 James R. Barlow
# SPDX-License-Identifier: MPL-2.0
"""For managing PDF encryption."""
from __future__ import annotations
import sys
from typing import TYPE_CHECKING, Any, NamedTuple, cast
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal # pragma: no cover
if TYPE_CHECKING:
from pikepdf._qpdf import EncryptionMethod
class Permissions(NamedTuple):
    """
    Stores the user-level permissions for an encrypted PDF.
    A compliant PDF reader/writer should enforce these restrictions on people
    who have the user password and not the owner password. In practice, either
    password is sufficient to decrypt all document contents. A person who has
    the owner password should be allowed to modify the document in any way.
    pikepdf does not enforce the restrictions in any way; it is up to application
    developers to enforce them as they see fit.
    Unencrypted PDFs implicitly have all permissions allowed. Permissions can
    only be changed when a PDF is saved.
    """
    # Every flag defaults to permissive except modify_assembly (False below).
    accessibility: bool = True
    """Can users use screen readers and accessibility tools to read the PDF?"""
    extract: bool = True
    """Can users extract contents?"""
    modify_annotation: bool = True
    """Can users modify annotations?"""
    modify_assembly: bool = False
    """Can users arrange document contents?"""
    modify_form: bool = True
    """Can users fill out forms?"""
    modify_other: bool = True
    """Can users modify the document?"""
    print_lowres: bool = True
    """Can users print the document at low resolution?"""
    print_highres: bool = True
    """Can users print the document at high resolution?"""
# Shared default instance: all permissions in their default (mostly
# permissive) state. Used as the `allow` default in Encryption.
DEFAULT_PERMISSIONS = Permissions()
class EncryptionInfo:
    """
    Reports encryption information for an encrypted PDF.
    This information may not be changed, except when a PDF is saved.
    This object is not used to specify the encryption settings to save
    a PDF, due to non-overlapping information requirements.
    """
    def __init__(self, encdict: dict[str, Any]):
        """
        Initialize EncryptionInfo.
        Generally pikepdf will initialize and return it.
        Args:
            encdict: Python dictionary containing encryption settings.
        """
        # Raw settings mapping; treated as read-only here -- every
        # accessor below only reads keys from it.
        self._encdict = encdict
    @property
    def R(self) -> int:
        """Revision number of the security handler."""
        return int(self._encdict['R'])
    @property
    def V(self) -> int:
        """Version of PDF password algorithm."""
        return int(self._encdict['V'])
    @property
    def P(self) -> int:
        """Return encoded permission bits.
        See :meth:`Pdf.allow` instead.
        """
        return int(self._encdict['P'])
    @property
    def stream_method(self) -> EncryptionMethod:
        """Encryption method used to encode streams."""
        return cast('EncryptionMethod', self._encdict['stream'])
    @property
    def string_method(self) -> EncryptionMethod:
        """Encryption method used to encode strings."""
        return cast('EncryptionMethod', self._encdict['string'])
    @property
    def file_method(self) -> EncryptionMethod:
        """Encryption method used to encode the whole file."""
        return cast('EncryptionMethod', self._encdict['file'])
    @property
    def user_password(self) -> bytes:
        """If possible, return the user password.
        The user password can only be retrieved when a PDF is opened
        with the owner password and when older versions of the
        encryption algorithm are used.
        The password is always returned as ``bytes`` even if it has
        a clear Unicode representation.
        """
        return bytes(self._encdict['user_passwd'])
    @property
    def encryption_key(self) -> bytes:
        """Return the RC4 or AES encryption key used for this file."""
        return bytes(self._encdict['encryption_key'])
    @property
    def bits(self) -> int:
        """Return the number of bits in the encryption algorithm.
        e.g. if the algorithm is AES-256, this returns 256.
        """
        # The key is stored as bytes; 8 bits per key byte.
        return len(self._encdict['encryption_key']) * 8
class Encryption(NamedTuple):
    """Specify the encryption settings to apply when a PDF is saved."""
    owner: str = ''
    """The owner password to use. This allows full control
    of the file. If blank, the PDF will be encrypted and
    present as "(SECURED)" in PDF viewers. If the owner password
    is blank, the user password should be as well."""
    user: str = ''
    """The user password to use. With this password, some
    restrictions will be imposed by a typical PDF reader.
    If blank, the PDF can be opened by anyone, but only modified
    as allowed by the permissions in ``allow``."""
    R: Literal[2, 3, 4, 5, 6] = 6
    """Select the security handler algorithm to use. Choose from:
    ``2``, ``3``, ``4`` or ``6``. By default, the highest version
    (``6``) is selected. ``5`` is a deprecated algorithm that should
    not be used."""
    allow: Permissions = DEFAULT_PERMISSIONS
    """The permissions to set.
    If omitted, all permissions are granted to the user."""
    aes: bool = True
    """If True, request the AES algorithm. If False, use RC4.
    If omitted, AES is selected whenever possible (R >= 4)."""
    metadata: bool = True
    """If True, also encrypt the PDF metadata. If False,
    metadata is not encrypted. Reading document metadata without
    decryption may be desirable in some cases. Requires ``aes=True``.
    If omitted, metadata is encrypted whenever possible."""
|
mpl-2.0
|
21e03933294d545d53727875d93a860f
| 31.107955
| 81
| 0.660414
| 4.418296
| false
| false
| false
| false
|
End of preview. Expand
in Data Studio
No dataset card yet
- Downloads last month
- 10